import warnings
warnings.filterwarnings('ignore')
Configuration de MLFlow avec tracking local¶
# The version initially used, mlflow 2.17.0, is no longer compatible.
# ! pip install mlflow==2.17.0 --use-deprecated=legacy-resolver
import mlflow

# Point the MLflow client at the local tracking server.
TRACKING_URI = "http://localhost:5000"
mlflow.set_tracking_uri(TRACKING_URI)

# Create (or reuse) the experiment for the classic word-embedding approach.
mlflow.set_experiment("approche_classique")
<Experiment: artifact_location='mlflow-artifacts:/374735653194037029', creation_time=1729108772155, experiment_id='374735653194037029', last_update_time=1729108772155, lifecycle_stage='active', name='approche_classique', tags={}>
Approche classique: embeddings de mots¶
import pandas as pd

# Load the pre-split datasets produced by the preprocessing step.
DATA_DIR = "./data"
train_df = pd.read_csv(f"{DATA_DIR}/train_df.csv")
test_df = pd.read_csv(f"{DATA_DIR}/test_df.csv")
val_df = pd.read_csv(f"{DATA_DIR}/val_df.csv")

# Sanity check: (rows, columns) of the training split.
train_df.shape
(3395, 15)
# Sanity check: (rows, columns) of the validation split.
val_df.shape
(1456, 15)
# Sanity check: (rows, columns) of the test split.
test_df.shape
(1617, 15)
# Extract the sentiment label column from each split.
y_train, y_test, y_val = (
    df["target"] for df in (train_df, test_df, val_df)
)
Embeddings de comptage des mots¶
En réalisant les embeddings de façon indépendante sur le jeu de train et de test on s'assure qu'il n'y a pas de fuite de données et que l'appréciation de performance du modèle ne sera pas biaisée. Cependant il y a un fort risque d'avoir des OOV lors du test.
# Quick look at the first training rows.
train_df.head()
| target | ids | date | flag | user | text | sentiment_score | cleaned_text | tokenized | preprocessed_text | preprocessed_tokenized | length_text | length_tokenized | length_preprocessed_tokenized | sia_sentiment | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 0 | 1977563166 | Sat May 30 21:13:33 PDT 2009 | NO_QUERY | IamTam | Redboxing it with the crew. The smell of deet ... | 4 | Red boxing it with the crew. The smell of DEET... | ['Red', 'boxing', 'it', 'with', 'the', 'crew',... | red boxing crew smell deet going overwhelming... | ['red', 'boxing', 'crew', 'smell', 'deet', 'go... | 98 | 21 | 8 | 0.39715 |
| 1 | 0 | 2069918369 | Sun Jun 07 16:46:09 PDT 2009 | NO_QUERY | eyeDu | Just wrapped! shouts to @marcclark and the cre... | 4 | Just wrapped! Shouts to <mention> and the crew... | ['Just', 'wrapped', '!', 'Shouts', 'to', '<men... | wrapped ! shout <mention> crew ! shot sum new ... | ['wrapped', '!', 'shout', '<mention>', 'crew',... | 131 | 24 | 14 | 0.50000 |
| 2 | 0 | 2190833602 | Tue Jun 16 03:51:13 PDT 2009 | NO_QUERY | prispatel | thinking whether should return Ji Herng his lu... | 4 | Thinking whether you should return Ji Herne hi... | ['Thinking', 'whether', 'you', 'should', 'retu... | thinking whether return ji herne luggage keep... | ['thinking', 'whether', 'return', 'ji', 'herne... | 85 | 18 | 9 | 0.50000 |
| 3 | 0 | 2044695036 | Fri Jun 05 09:42:05 PDT 2009 | NO_QUERY | KevinDelano | en route to Baltimore airport to pick up Molly | 4 | En route to Baltimore airport to pick up Molly | ['En', 'route', 'to', 'Baltimore', 'airport', ... | en route baltimore airport pick molly | ['en', 'route', 'baltimore', 'airport', 'pick'... | 47 | 9 | 6 | 0.50000 |
| 4 | 0 | 1994165464 | Mon Jun 01 11:30:57 PDT 2009 | NO_QUERY | KellyMcEwen | @emmacandlish re-reading new moon. decided im ... | 4 | <mention> re-reading new moon. Decided I am ta... | ['<mention>', 're-reading', 'new', 'moon', '.'... | <mention> rereading new moon decided taking c... | ['<mention>', 'rereading', 'new', 'moon', 'dec... | 115 | 25 | 13 | 0.22635 |
CountVectorizer¶
from sklearn.feature_extraction.text import CountVectorizer

# Bag-of-words counts: fit the vocabulary on the TRAINING split only, then
# reuse that fitted vocabulary on validation/test so no information leaks
# from them into the features.
count_vectorizer = CountVectorizer()
train_count_sparse = count_vectorizer.fit_transform(train_df['preprocessed_text'])
val_count_sparse = count_vectorizer.transform(val_df['preprocessed_text'])
test_count_sparse = count_vectorizer.transform(test_df['preprocessed_text'])

# # Precaution to take with sparse matrices
# from sklearn.preprocessing import StandardScaler
# scaler = StandardScaler(with_mean=False)
# train_count_scaled = scaler.fit_transform(train_count_sparse)
# test_count_scaled = scaler.transform(test_count_sparse)  # transform only — never re-fit on test (data leakage)

# Densify the sparse matrices into DataFrames.
# NOTE(review): .toarray() materialises a dense (n_docs x vocab_size) matrix;
# this is memory-heavy for large vocabularies.
import scipy.sparse
# The vocabulary is identical for all three splits — compute it once.
feature_names = count_vectorizer.get_feature_names_out()
train_count = pd.DataFrame(train_count_sparse.toarray(), columns=feature_names)
val_count = pd.DataFrame(val_count_sparse.toarray(), columns=feature_names)
test_count = pd.DataFrame(test_count_sparse.toarray(), columns=feature_names)

from scipy.sparse import csr_matrix, hstack

# Re-attach the labels so downstream tools can use `target` as the y column.
train_count["target"] = y_train.values
val_count["target"] = y_val.values
test_count["target"] = y_test.values
Modélisation à partir des embeddings de mots : tests rapides avec Pycaret¶
# IMPORTANT: PyCaret supports Python 3.9 to 3.11 — prefer 3.9 here to avoid
# dependency conflicts between MLflow and PyCaret.
import sys
print(sys.version)
3.9.20 (main, Oct 3 2024, 07:38:01) [MSC v.1929 64 bit (AMD64)]
# ! pip install pycaret==3.3.2
# ! pip install pycaret[full]
import pycaret
from pycaret.classification import *
# Initialise the PyCaret classification experiment:
# - trains on the count-vectorized training split, validates on val_count;
# - 10-fold stratified CV keeps the class balance in every fold;
# - session_id fixes the RNG seed for reproducibility;
# - log_experiment sends runs to the MLflow experiment configured above.
s = setup(
data=train_count,
target='target',
test_data=val_count,
fold_strategy='stratifiedkfold',
fold=10,
session_id=123,
index=False,
use_gpu=True,
log_experiment=True,
experiment_name="approche_classique",
experiment_custom_tags={'framework': 'pycaret', 'vectorizer': 'count vectorizer'}
)
[LightGBM] [Warning] There are no meaningful features which satisfy the provided configuration. Decreasing Dataset parameters min_data_in_bin or min_data_in_leaf and re-constructing Dataset might resolve this warning. [LightGBM] [Info] Number of positive: 1, number of negative: 1 [LightGBM] [Info] This is the GPU trainer!! [LightGBM] [Info] Total Bins 0 [LightGBM] [Info] Number of data points in the train set: 2, number of used features: 0 [LightGBM] [Info] Using GPU Device: gfx1035, Vendor: Advanced Micro Devices, Inc. [LightGBM] [Info] Compiling OpenCL Kernel with 16 bins... [LightGBM] [Info] GPU programs have been built [LightGBM] [Warning] GPU acceleration is disabled because no non-trivial dense features can be found [LightGBM] [Info] [binary:BoostFromScore]: pavg=0.500000 -> initscore=0.000000 [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training 
because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no 
more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet 
the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements 
[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] 
Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] There are no meaningful features which satisfy the provided configuration. Decreasing Dataset parameters min_data_in_bin or min_data_in_leaf and re-constructing Dataset might resolve this warning. [LightGBM] [Info] Number of positive: 1, number of negative: 1 [LightGBM] [Info] This is the GPU trainer!! [LightGBM] [Info] Total Bins 0 [LightGBM] [Info] Number of data points in the train set: 2, number of used features: 0 [LightGBM] [Info] Using GPU Device: gfx1035, Vendor: Advanced Micro Devices, Inc. [LightGBM] [Info] Compiling OpenCL Kernel with 16 bins... 
[LightGBM] [Info] GPU programs have been built [LightGBM] [Warning] GPU acceleration is disabled because no non-trivial dense features can be found [LightGBM] [Info] [binary:BoostFromScore]: pavg=0.500000 -> initscore=0.000000 [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements 
[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] 
Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training 
because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no 
more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet 
the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] There are no meaningful features which satisfy the provided configuration. Decreasing Dataset parameters min_data_in_bin or min_data_in_leaf and re-constructing Dataset might resolve this warning. [LightGBM] [Info] Number of positive: 1, number of negative: 1 [LightGBM] [Info] This is the GPU trainer!! [LightGBM] [Info] Total Bins 0 [LightGBM] [Info] Number of data points in the train set: 2, number of used features: 0 [LightGBM] [Info] Using GPU Device: gfx1035, Vendor: Advanced Micro Devices, Inc. [LightGBM] [Info] Compiling OpenCL Kernel with 16 bins... 
[LightGBM] [Info] GPU programs have been built [LightGBM] [Warning] GPU acceleration is disabled because no non-trivial dense features can be found [LightGBM] [Info] [binary:BoostFromScore]: pavg=0.500000 -> initscore=0.000000 [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements 
[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] 
Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training 
because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no 
more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet 
the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] There are no meaningful features which satisfy the provided configuration. Decreasing Dataset parameters min_data_in_bin or min_data_in_leaf and re-constructing Dataset might resolve this warning. [LightGBM] [Info] Number of positive: 1, number of negative: 1 [LightGBM] [Info] This is the GPU trainer!! [LightGBM] [Info] Total Bins 0 [LightGBM] [Info] Number of data points in the train set: 2, number of used features: 0 [LightGBM] [Info] Using GPU Device: gfx1035, Vendor: Advanced Micro Devices, Inc. [LightGBM] [Info] Compiling OpenCL Kernel with 16 bins... 
[LightGBM] [Info] GPU programs have been built [LightGBM] [Warning] GPU acceleration is disabled because no non-trivial dense features can be found [LightGBM] [Info] [binary:BoostFromScore]: pavg=0.500000 -> initscore=0.000000 [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements 
[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] 
Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training 
because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no 
more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet 
the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] There are no meaningful features which satisfy the provided configuration. Decreasing Dataset parameters min_data_in_bin or min_data_in_leaf and re-constructing Dataset might resolve this warning. [LightGBM] [Info] Number of positive: 1, number of negative: 1 [LightGBM] [Info] This is the GPU trainer!! [LightGBM] [Info] Total Bins 0 [LightGBM] [Info] Number of data points in the train set: 2, number of used features: 0 [LightGBM] [Info] Using GPU Device: gfx1035, Vendor: Advanced Micro Devices, Inc. [LightGBM] [Info] Compiling OpenCL Kernel with 16 bins... 
[LightGBM] [Info] GPU programs have been built [LightGBM] [Warning] GPU acceleration is disabled because no non-trivial dense features can be found [LightGBM] [Info] [binary:BoostFromScore]: pavg=0.500000 -> initscore=0.000000 [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements 
[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] 
Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training 
because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no 
more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet 
the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] There are no meaningful features which satisfy the provided configuration. Decreasing Dataset parameters min_data_in_bin or min_data_in_leaf and re-constructing Dataset might resolve this warning. [LightGBM] [Info] Number of positive: 1, number of negative: 1 [LightGBM] [Info] This is the GPU trainer!! [LightGBM] [Info] Total Bins 0 [LightGBM] [Info] Number of data points in the train set: 2, number of used features: 0 [LightGBM] [Info] Using GPU Device: gfx1035, Vendor: Advanced Micro Devices, Inc. [LightGBM] [Info] Compiling OpenCL Kernel with 16 bins... 
[LightGBM] [Info] GPU programs have been built [LightGBM] [Warning] GPU acceleration is disabled because no non-trivial dense features can be found [LightGBM] [Info] [binary:BoostFromScore]: pavg=0.500000 -> initscore=0.000000 [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements 
[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] 
Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training 
because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no 
more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet 
the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements
| Description | Value | |
|---|---|---|
| 0 | Session id | 123 |
| 1 | Target | target |
| 2 | Target type | Binary |
| 3 | Original data shape | (4851, 6069) |
| 4 | Transformed data shape | (4851, 6069) |
| 5 | Transformed train set shape | (3395, 6069) |
| 6 | Transformed test set shape | (1456, 6069) |
| 7 | Numeric features | 6068 |
| 8 | Preprocess | True |
| 9 | Imputation type | simple |
| 10 | Numeric imputation | mean |
| 11 | Categorical imputation | mode |
| 12 | Fold Generator | StratifiedKFold |
| 13 | Fold Number | 10 |
| 14 | CPU Jobs | -1 |
| 15 | Use GPU | True |
| 16 | Log Experiment | MlflowLogger |
| 17 | Experiment Name | approche_classique |
| 18 | USI | 4603 |
[LightGBM] [Warning] There are no meaningful features which satisfy the provided configuration. Decreasing Dataset parameters min_data_in_bin or min_data_in_leaf and re-constructing Dataset might resolve this warning. [LightGBM] [Info] Number of positive: 1, number of negative: 1 [LightGBM] [Info] This is the GPU trainer!! [LightGBM] [Info] Total Bins 0 [LightGBM] [Info] Number of data points in the train set: 2, number of used features: 0 [LightGBM] [Info] Using GPU Device: gfx1035, Vendor: Advanced Micro Devices, Inc. [LightGBM] [Info] Compiling OpenCL Kernel with 16 bins... [LightGBM] [Info] GPU programs have been built [LightGBM] [Warning] GPU acceleration is disabled because no non-trivial dense features can be found [LightGBM] [Info] [binary:BoostFromScore]: pavg=0.500000 -> initscore=0.000000 [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training 
because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no 
more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet 
the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements 
[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] 
Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] There are no meaningful features which satisfy the provided configuration. Decreasing Dataset parameters min_data_in_bin or min_data_in_leaf and re-constructing Dataset might resolve this warning. [LightGBM] [Info] Number of positive: 1, number of negative: 1 [LightGBM] [Info] This is the GPU trainer!! [LightGBM] [Info] Total Bins 0 [LightGBM] [Info] Number of data points in the train set: 2, number of used features: 0 [LightGBM] [Info] Using GPU Device: gfx1035, Vendor: Advanced Micro Devices, Inc. [LightGBM] [Info] Compiling OpenCL Kernel with 16 bins... 
[LightGBM] [Info] GPU programs have been built [LightGBM] [Warning] GPU acceleration is disabled because no non-trivial dense features can be found [LightGBM] [Info] [binary:BoostFromScore]: pavg=0.500000 -> initscore=0.000000 [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements 
[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] 
Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training 
because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no 
more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet 
the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements
# Sanity-check that MLflow is still pointed at the right place before training.
# Show the active experiment (None here would mean set_experiment did not run).
experiment = mlflow.get_experiment_by_name("approche_classique")
print(experiment)
# Show the tracking URI to confirm the local MLflow server is configured.
print(mlflow.get_tracking_uri())
<Experiment: artifact_location='mlflow-artifacts:/374735653194037029', creation_time=1729108772155, experiment_id='374735653194037029', last_update_time=1729108772155, lifecycle_stage='active', name='approche_classique', tags={}>
http://localhost:5000
# Compare baseline models and return the best 8 (sorted by accuracy).
# NOTE(review): restrict the search with e.g. include=["nb", "lr"] to speed this up.
best_models = compare_models(sort='Accuracy',n_select=8 ) #, include=["nb", "lr"]
| Model | Accuracy | AUC | Recall | Prec. | F1 | Kappa | MCC | TT (Sec) | |
|---|---|---|---|---|---|---|---|---|---|
| lr | Logistic Regression | 0.7131 | 0.7755 | 0.7044 | 0.7184 | 0.7109 | 0.4262 | 0.4268 | 1.0850 |
| rf | Random Forest Classifier | 0.7122 | 0.7857 | 0.6574 | 0.7400 | 0.6951 | 0.4246 | 0.4282 | 1.3680 |
| et | Extra Trees Classifier | 0.7069 | 0.7823 | 0.6627 | 0.7298 | 0.6932 | 0.4140 | 0.4171 | 1.9400 |
| lightgbm | Light Gradient Boosting Machine | 0.6990 | 0.7777 | 0.6744 | 0.7106 | 0.6912 | 0.3980 | 0.3993 | 0.8780 |
| ridge | Ridge Classifier | 0.6951 | 0.7409 | 0.6886 | 0.6996 | 0.6935 | 0.3903 | 0.3909 | 1.0950 |
| svm | SVM - Linear Kernel | 0.6845 | 0.7383 | 0.6933 | 0.6836 | 0.6877 | 0.3690 | 0.3698 | 1.4240 |
| gbc | Gradient Boosting Classifier | 0.6837 | 0.7662 | 0.7514 | 0.6627 | 0.7038 | 0.3671 | 0.3713 | 12.6630 |
| ada | Ada Boost Classifier | 0.6828 | 0.7556 | 0.5611 | 0.7528 | 0.6359 | 0.3659 | 0.3838 | 7.0250 |
| dt | Decision Tree Classifier | 0.6813 | 0.6839 | 0.6345 | 0.7008 | 0.6655 | 0.3627 | 0.3647 | 1.8810 |
| knn | K Neighbors Classifier | 0.6071 | 0.6590 | 0.5194 | 0.6322 | 0.5672 | 0.2145 | 0.2192 | 0.8270 |
| nb | Naive Bayes | 0.5467 | 0.5456 | 0.8026 | 0.5319 | 0.6397 | 0.0921 | 0.1068 | 0.8550 |
| qda | Quadratic Discriminant Analysis | 0.5461 | 0.5494 | 0.4405 | 0.5626 | 0.4835 | 0.0926 | 0.0975 | 6.8850 |
| lda | Linear Discriminant Analysis | 0.5393 | 0.5355 | 0.5235 | 0.5414 | 0.5315 | 0.0787 | 0.0789 | 13.5070 |
| dummy | Dummy Classifier | 0.5013 | 0.5000 | 1.0000 | 0.5013 | 0.6678 | 0.0000 | 0.0000 | 0.5140 |
2024/12/06 12:24:34 WARNING mlflow.models.model: Model logged without a signature and input example. Please set `input_example` parameter when logging the model to auto infer the model signature. 2024/12/06 12:24:34 INFO mlflow.tracking._tracking_service.client: 🏃 View run Logistic Regression at: http://localhost:5000/#/experiments/374735653194037029/runs/22f672d517dd421e9564d496ed52526b. 2024/12/06 12:24:34 INFO mlflow.tracking._tracking_service.client: 🧪 View experiment at: http://localhost:5000/#/experiments/374735653194037029. 2024/12/06 12:24:37 WARNING mlflow.models.model: Model logged without a signature and input example. Please set `input_example` parameter when logging the model to auto infer the model signature. 2024/12/06 12:24:37 INFO mlflow.tracking._tracking_service.client: 🏃 View run Random Forest Classifier at: http://localhost:5000/#/experiments/374735653194037029/runs/13277c4b97e14bb39a8f24e45348a6cd. 2024/12/06 12:24:37 INFO mlflow.tracking._tracking_service.client: 🧪 View experiment at: http://localhost:5000/#/experiments/374735653194037029. 2024/12/06 12:24:41 WARNING mlflow.models.model: Model logged without a signature and input example. Please set `input_example` parameter when logging the model to auto infer the model signature. 2024/12/06 12:24:41 INFO mlflow.tracking._tracking_service.client: 🏃 View run Extra Trees Classifier at: http://localhost:5000/#/experiments/374735653194037029/runs/46e46008291b494d8d649a2dd4e207d8. 2024/12/06 12:24:41 INFO mlflow.tracking._tracking_service.client: 🧪 View experiment at: http://localhost:5000/#/experiments/374735653194037029. 2024/12/06 12:24:44 WARNING mlflow.models.model: Model logged without a signature and input example. Please set `input_example` parameter when logging the model to auto infer the model signature. 
2024/12/06 12:24:44 INFO mlflow.tracking._tracking_service.client: 🏃 View run Light Gradient Boosting Machine at: http://localhost:5000/#/experiments/374735653194037029/runs/09af4de296254e79a4a266a613b2bf7c. 2024/12/06 12:24:44 INFO mlflow.tracking._tracking_service.client: 🧪 View experiment at: http://localhost:5000/#/experiments/374735653194037029. 2024/12/06 12:24:46 WARNING mlflow.models.model: Model logged without a signature and input example. Please set `input_example` parameter when logging the model to auto infer the model signature. 2024/12/06 12:24:46 INFO mlflow.tracking._tracking_service.client: 🏃 View run Ridge Classifier at: http://localhost:5000/#/experiments/374735653194037029/runs/8a4b1d07501f41d98a70100526004616. 2024/12/06 12:24:46 INFO mlflow.tracking._tracking_service.client: 🧪 View experiment at: http://localhost:5000/#/experiments/374735653194037029. 2024/12/06 12:24:49 WARNING mlflow.models.model: Model logged without a signature and input example. Please set `input_example` parameter when logging the model to auto infer the model signature. 2024/12/06 12:24:49 INFO mlflow.tracking._tracking_service.client: 🏃 View run SVM - Linear Kernel at: http://localhost:5000/#/experiments/374735653194037029/runs/bec5a0dc93144a5b9c29c80e927f526c. 2024/12/06 12:24:49 INFO mlflow.tracking._tracking_service.client: 🧪 View experiment at: http://localhost:5000/#/experiments/374735653194037029. 2024/12/06 12:25:05 WARNING mlflow.models.model: Model logged without a signature and input example. Please set `input_example` parameter when logging the model to auto infer the model signature. 2024/12/06 12:25:05 INFO mlflow.tracking._tracking_service.client: 🏃 View run Gradient Boosting Classifier at: http://localhost:5000/#/experiments/374735653194037029/runs/0a2b97679925445f94f6a6c451f1379b. 2024/12/06 12:25:05 INFO mlflow.tracking._tracking_service.client: 🧪 View experiment at: http://localhost:5000/#/experiments/374735653194037029. 
2024/12/06 12:25:16 WARNING mlflow.models.model: Model logged without a signature and input example. Please set `input_example` parameter when logging the model to auto infer the model signature. 2024/12/06 12:25:16 INFO mlflow.tracking._tracking_service.client: 🏃 View run Ada Boost Classifier at: http://localhost:5000/#/experiments/374735653194037029/runs/4bc842ba9dc5460aa6025f3f34b36373. 2024/12/06 12:25:16 INFO mlflow.tracking._tracking_service.client: 🧪 View experiment at: http://localhost:5000/#/experiments/374735653194037029. 2024/12/06 12:25:17 WARNING mlflow.models.model: Model logged without a signature and input example. Please set `input_example` parameter when logging the model to auto infer the model signature. 2024/12/06 12:25:17 INFO mlflow.tracking._tracking_service.client: 🏃 View run Decision Tree Classifier at: http://localhost:5000/#/experiments/374735653194037029/runs/bd9b977e265144caa133b6bd59dc5db4. 2024/12/06 12:25:17 INFO mlflow.tracking._tracking_service.client: 🧪 View experiment at: http://localhost:5000/#/experiments/374735653194037029. 2024/12/06 12:25:17 WARNING mlflow.models.model: Model logged without a signature and input example. Please set `input_example` parameter when logging the model to auto infer the model signature. 2024/12/06 12:25:17 INFO mlflow.tracking._tracking_service.client: 🏃 View run K Neighbors Classifier at: http://localhost:5000/#/experiments/374735653194037029/runs/0fda6a2e440b4bb9a9bf241cde1bb604. 2024/12/06 12:25:17 INFO mlflow.tracking._tracking_service.client: 🧪 View experiment at: http://localhost:5000/#/experiments/374735653194037029. 2024/12/06 12:25:18 WARNING mlflow.models.model: Model logged without a signature and input example. Please set `input_example` parameter when logging the model to auto infer the model signature. 
2024/12/06 12:25:18 INFO mlflow.tracking._tracking_service.client: 🏃 View run Naive Bayes at: http://localhost:5000/#/experiments/374735653194037029/runs/00667e795bf94bbfb3b910a5d1198ed5. 2024/12/06 12:25:18 INFO mlflow.tracking._tracking_service.client: 🧪 View experiment at: http://localhost:5000/#/experiments/374735653194037029. 2024/12/06 12:25:19 WARNING mlflow.models.model: Model logged without a signature and input example. Please set `input_example` parameter when logging the model to auto infer the model signature. 2024/12/06 12:25:19 INFO mlflow.tracking._tracking_service.client: 🏃 View run Quadratic Discriminant Analysis at: http://localhost:5000/#/experiments/374735653194037029/runs/c114de201f0b464099b10be949f4a887. 2024/12/06 12:25:19 INFO mlflow.tracking._tracking_service.client: 🧪 View experiment at: http://localhost:5000/#/experiments/374735653194037029. 2024/12/06 12:25:19 WARNING mlflow.models.model: Model logged without a signature and input example. Please set `input_example` parameter when logging the model to auto infer the model signature. 2024/12/06 12:25:19 INFO mlflow.tracking._tracking_service.client: 🏃 View run Linear Discriminant Analysis at: http://localhost:5000/#/experiments/374735653194037029/runs/fa8a7c5cafe0417ea4091606d1b5f3bf. 2024/12/06 12:25:19 INFO mlflow.tracking._tracking_service.client: 🧪 View experiment at: http://localhost:5000/#/experiments/374735653194037029. 2024/12/06 12:25:20 WARNING mlflow.models.model: Model logged without a signature and input example. Please set `input_example` parameter when logging the model to auto infer the model signature. 2024/12/06 12:25:20 INFO mlflow.tracking._tracking_service.client: 🏃 View run Dummy Classifier at: http://localhost:5000/#/experiments/374735653194037029/runs/56d55743500743d7a6c4d72f2b952cd1. 2024/12/06 12:25:20 INFO mlflow.tracking._tracking_service.client: 🧪 View experiment at: http://localhost:5000/#/experiments/374735653194037029.
# Manual selection of base models for stacking.
# NOTE(review): the original 'extra tree' / 'ridge' comments look stale — the
# fitted stack printed below shows RandomForest, LogisticRegression and
# GradientBoosting, which matches the variable names; confirm the
# best_models indices before trusting either label.
rf = best_models[1] # presumably random forest (original comment said 'extra tree')
lr = best_models[0] # logistic regression
gbc = best_models[6] # presumably gradient boosting (original comment said 'ridge')
# Build the stacked model from these base learners
# (base models can also be created directly, e.g. lr = create_model("lr"))
stacked_model = stack_models([rf, lr, gbc])
| Accuracy | AUC | Recall | Prec. | F1 | Kappa | MCC | |
|---|---|---|---|---|---|---|---|
| Fold | |||||||
| 0 | 0.7324 | 0.7983 | 0.7000 | 0.7484 | 0.7234 | 0.4647 | 0.4657 |
| 1 | 0.7088 | 0.7800 | 0.7294 | 0.7006 | 0.7147 | 0.4176 | 0.4180 |
| 2 | 0.7176 | 0.7932 | 0.6647 | 0.7434 | 0.7019 | 0.4353 | 0.4378 |
| 3 | 0.7441 | 0.8307 | 0.7661 | 0.7360 | 0.7507 | 0.4881 | 0.4885 |
| 4 | 0.7382 | 0.7766 | 0.6901 | 0.7662 | 0.7262 | 0.4768 | 0.4792 |
| 5 | 0.7021 | 0.7902 | 0.6412 | 0.7315 | 0.6834 | 0.4043 | 0.4075 |
| 6 | 0.7227 | 0.8197 | 0.7529 | 0.7111 | 0.7314 | 0.4453 | 0.4461 |
| 7 | 0.6873 | 0.7530 | 0.6471 | 0.7051 | 0.6748 | 0.3748 | 0.3761 |
| 8 | 0.6991 | 0.7971 | 0.7000 | 0.7000 | 0.7000 | 0.3982 | 0.3982 |
| 9 | 0.7198 | 0.8053 | 0.7059 | 0.7273 | 0.7164 | 0.4396 | 0.4398 |
| Mean | 0.7172 | 0.7944 | 0.6997 | 0.7270 | 0.7123 | 0.4345 | 0.4357 |
| Std | 0.0172 | 0.0209 | 0.0396 | 0.0213 | 0.0216 | 0.0343 | 0.0343 |
2024/12/06 12:52:05 WARNING mlflow.models.model: Model logged without a signature and input example. Please set `input_example` parameter when logging the model to auto infer the model signature. 2024/12/06 12:52:05 INFO mlflow.tracking._tracking_service.client: 🏃 View run Stacking Classifier at: http://localhost:5000/#/experiments/374735653194037029/runs/7e08e91fd1304424aee3c7ec32387fc8. 2024/12/06 12:52:05 INFO mlflow.tracking._tracking_service.client: 🧪 View experiment at: http://localhost:5000/#/experiments/374735653194037029.
stacked_model
StackingClassifier(cv=5,
estimators=[('Random Forest Classifier',
RandomForestClassifier(bootstrap=True,
ccp_alpha=0.0,
class_weight=None,
criterion='gini',
max_depth=None,
max_features='sqrt',
max_leaf_nodes=None,
max_samples=None,
min_impurity_decrease=0.0,
min_samples_leaf=1,
min_samples_split=2,
min_weight_fraction_leaf=0.0,
monotonic_cst=None,
n_estimators=100...
validation_fraction=0.1,
verbose=0,
warm_start=False))],
final_estimator=LogisticRegression(C=1.0, class_weight=None,
dual=False,
fit_intercept=True,
intercept_scaling=1,
l1_ratio=None,
max_iter=1000,
multi_class='auto',
n_jobs=None, penalty='l2',
random_state=123,
solver='lbfgs',
tol=0.0001, verbose=0,
warm_start=False),
n_jobs=1, passthrough=False, stack_method='auto', verbose=0)In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook. On GitHub, the HTML representation is unable to render, please try loading this page with nbviewer.org.
StackingClassifier(cv=5,
estimators=[('Random Forest Classifier',
RandomForestClassifier(bootstrap=True,
ccp_alpha=0.0,
class_weight=None,
criterion='gini',
max_depth=None,
max_features='sqrt',
max_leaf_nodes=None,
max_samples=None,
min_impurity_decrease=0.0,
min_samples_leaf=1,
min_samples_split=2,
min_weight_fraction_leaf=0.0,
monotonic_cst=None,
n_estimators=100...
validation_fraction=0.1,
verbose=0,
warm_start=False))],
final_estimator=LogisticRegression(C=1.0, class_weight=None,
dual=False,
fit_intercept=True,
intercept_scaling=1,
l1_ratio=None,
max_iter=1000,
multi_class='auto',
n_jobs=None, penalty='l2',
random_state=123,
solver='lbfgs',
tol=0.0001, verbose=0,
warm_start=False),
n_jobs=1, passthrough=False, stack_method='auto', verbose=0)RandomForestClassifier(n_jobs=-1, random_state=123)
LogisticRegression(max_iter=1000, random_state=123)
GradientBoostingClassifier(random_state=123)
LogisticRegression(max_iter=1000, random_state=123)
# Plot the stacked model's confusion matrix and save it to disk
# (pycaret writes the file 'Confusion Matrix.png', echoed by the cell output)
plot_model(stacked_model, plot = 'confusion_matrix', save=True)
'Confusion Matrix.png'
# Retrieve the existing "Stacking Classifier" run and attach the confusion
# matrix image to it as an artifact.
client = mlflow.tracking.MlflowClient()
experiment_id = client.get_experiment_by_name('approche_classique').experiment_id

# List the runs of this experiment named "Stacking Classifier"
runs = client.search_runs(experiment_id, filter_string="tags.mlflow.runName = 'Stacking Classifier'")

# Take the run_id of the first matching run
run_id = runs[0].info.run_id

# Generate the confusion matrix and save it locally as 'Confusion Matrix.png'
plot_model(stacked_model, plot='confusion_matrix', save=True)

# Log the image into the existing run (nested: it lives under the setup's
# parent run). Using the context manager guarantees the run is closed even if
# log_artifact raises — the original start_run()/end_run() pair would have
# left the run open on failure.
with mlflow.start_run(run_id=run_id, nested=True):
    mlflow.log_artifact("Confusion Matrix.png")  # attach the image as an artifact
2024/12/06 12:59:05 INFO mlflow.tracking._tracking_service.client: 🏃 View run Stacking Classifier at: http://localhost:5000/#/experiments/374735653194037029/runs/7e08e91fd1304424aee3c7ec32387fc8. 2024/12/06 12:59:05 INFO mlflow.tracking._tracking_service.client: 🧪 View experiment at: http://localhost:5000/#/experiments/374735653194037029.
# Display the stacked model's confusion matrix inline (not saved this time)
plot_model(stacked_model, plot='confusion_matrix')
Le stacking combine les forces des différents modèles en donnant des performances homogènes.
Pour cet embedding nous allons aussi optimiser le meilleur des modèles. On optimise par rapport à l'accuracy car l'optimisation selon recall conduit à obtenir uniquement la classe 1. On sauvegarde dans MLFlow un modèle complet avec schéma de données et signature.
import mlflow
from mlflow.models.signature import infer_signature
from pycaret.classification import tune_model, predict_model, plot_model
import pandas as pd

# Start a separate, nested MLflow run dedicated to the tuned model
with mlflow.start_run(run_name="Best model CountVectorizer", nested=True) as run:
    # Tune the best base model. Optimised on accuracy: as noted in the text
    # above, optimising on recall collapsed predictions to a single class.
    tuned_model = tune_model(estimator=best_models[0],
                             optimize="Accuracy",
                             choose_better=True,
                             verbose=False)

    # Predictions on the hold-out set, used to infer the model signature
    # (input schema + output schema) that is logged alongside the model
    pred_holdouts = predict_model(tuned_model, data=test_count)
    input_data = test_count.drop("target", axis=1)  # model inputs, target column removed
    output_data = pred_holdouts  # model outputs
    signature = infer_signature(input_data, output_data)

    # Log the tuned model together with its data schema and signature
    mlflow.sklearn.log_model(
        sk_model=tuned_model,
        artifact_path="tuned_model",
        signature=signature
    )

    # Log hyper-parameters and metrics
    mlflow.log_params(tuned_model.get_params())  # hyper-parameters
    # NOTE(review): scores the estimator on test_count directly — assumes
    # test_count is already vectorised with the exact training columns; confirm.
    accuracy = tuned_model.score(test_count.drop("target", axis=1), test_count["target"])
    mlflow.log_metric("Accuracy", accuracy)

    # Save and log the confusion matrix
    plot_model(tuned_model, plot='confusion_matrix', save=True)
    mlflow.log_artifact("Confusion Matrix.png")  # log as artifact
    # Save and log the ROC/AUC curve
    plot_model(tuned_model, plot='auc', save=True)
    mlflow.log_artifact("AUC.png")  # log as artifact
    # Save and log the decision-boundary plot
    plot_model(tuned_model, plot='boundary', save=True)
    mlflow.log_artifact("Decision Boundary.png")  # log as artifact
    # Save and log the classification report
    plot_model(tuned_model, plot='class_report', save=True)
    mlflow.log_artifact("Class Report.png")  # log as artifact
    print("Modèle, métriques et artefacts logués avec succès.")
2024/12/06 13:03:16 WARNING mlflow.models.model: Model logged without a signature and input example. Please set `input_example` parameter when logging the model to auto infer the model signature. 2024/12/06 13:03:16 INFO mlflow.tracking._tracking_service.client: 🏃 View run Logistic Regression at: http://localhost:5000/#/experiments/374735653194037029/runs/82b6645b63d74fa196ff00168d9fd84f. 2024/12/06 13:03:16 INFO mlflow.tracking._tracking_service.client: 🧪 View experiment at: http://localhost:5000/#/experiments/374735653194037029.
| Model | Accuracy | AUC | Recall | Prec. | F1 | Kappa | MCC | |
|---|---|---|---|---|---|---|---|---|
| 0 | Logistic Regression | 0.7143 | 0.7886 | 0.7259 | 0.7101 | 0.7179 | 0.4285 | 0.4287 |
2024/12/06 13:03:33 INFO mlflow.tracking._tracking_service.client: 🏃 View run Best model CountVectorizer at: http://localhost:5000/#/experiments/374735653194037029/runs/be1287e6a2694deca124a7b36f0752d2. 2024/12/06 13:03:33 INFO mlflow.tracking._tracking_service.client: 🧪 View experiment at: http://localhost:5000/#/experiments/374735653194037029.
Modèle, métriques et artefacts logués avec succès.
tuned_model
# ou plot_model(tuned_model, plot="parameter")
LogisticRegression(C=0.472, class_weight='balanced', dual=False,
fit_intercept=True, intercept_scaling=1, l1_ratio=None,
max_iter=1000, multi_class='auto', n_jobs=None, penalty='l2',
random_state=123, solver='lbfgs', tol=0.0001, verbose=0,
warm_start=False)In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook. On GitHub, the HTML representation is unable to render, please try loading this page with nbviewer.org.
LogisticRegression(C=0.472, class_weight='balanced', dual=False,
fit_intercept=True, intercept_scaling=1, l1_ratio=None,
max_iter=1000, multi_class='auto', n_jobs=None, penalty='l2',
random_state=123, solver='lbfgs', tol=0.0001, verbose=0,
warm_start=False)# plot confusion matrix
plot_model(tuned_model, plot = 'confusion_matrix')  # confusion matrix of the tuned model
plot_model(tuned_model, plot = 'auc')               # ROC / AUC curve
plot_model(tuned_model, plot = 'boundary')          # decision boundary (without normalisation)
plot_model(tuned_model, plot = 'class_report')      # per-class precision / recall / F1
Le tuning a produit ce qui était recherché : un bon recall avec une valeur d'accuracy à 0.7. Le modèle tuné finalisé a un temps de calcul de l'ordre de la seconde.
# Finalize the model: pycaret re-trains the tuned pipeline on the entire
# dataset (train + hold-out). The original call discarded the returned
# estimator, losing the fully-trained model — bind it so it can be used.
final_model = finalize_model(tuned_model)
2024/12/06 13:08:06 WARNING mlflow.models.model: Model logged without a signature and input example. Please set `input_example` parameter when logging the model to auto infer the model signature. 2024/12/06 13:08:06 INFO mlflow.tracking._tracking_service.client: 🏃 View run Logistic Regression at: http://localhost:5000/#/experiments/374735653194037029/runs/010c4e17963c4168814ef82e93066a70. 2024/12/06 13:08:06 INFO mlflow.tracking._tracking_service.client: 🧪 View experiment at: http://localhost:5000/#/experiments/374735653194037029.
Pipeline(memory=Memory(location=None),
steps=[('numerical_imputer',
TransformerWrapper(exclude=None,
include=['01', '032010', '0415', '0430',
'05', '0530', '0640hr', '09', '10',
'100', '10000', '10010', '100bt',
'100mi', '100mph', '100th',
'101130', '1017', '1022', '1030',
'1030pm', '104', '10411', '1045',
'106', '106npark', '10am',
'10hour', '10hrs', '10pm', ...],
transformer=Simp...
TransformerWrapper(exclude=None, include=None,
transformer=CleanColumnNames(match='[\\]\\[\\,\\{\\}\\"\\:]+'))),
('actual_estimator',
LogisticRegression(C=0.472, class_weight='balanced',
dual=False, fit_intercept=True,
intercept_scaling=1, l1_ratio=None,
max_iter=1000, multi_class='auto',
n_jobs=None, penalty='l2', random_state=123,
solver='lbfgs', tol=0.0001, verbose=0,
warm_start=False))],
verbose=False)In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook. On GitHub, the HTML representation is unable to render, please try loading this page with nbviewer.org.
Pipeline(memory=Memory(location=None),
steps=[('numerical_imputer',
TransformerWrapper(exclude=None,
include=['01', '032010', '0415', '0430',
'05', '0530', '0640hr', '09', '10',
'100', '10000', '10010', '100bt',
'100mi', '100mph', '100th',
'101130', '1017', '1022', '1030',
'1030pm', '104', '10411', '1045',
'106', '106npark', '10am',
'10hour', '10hrs', '10pm', ...],
transformer=Simp...
TransformerWrapper(exclude=None, include=None,
transformer=CleanColumnNames(match='[\\]\\[\\,\\{\\}\\"\\:]+'))),
('actual_estimator',
LogisticRegression(C=0.472, class_weight='balanced',
dual=False, fit_intercept=True,
intercept_scaling=1, l1_ratio=None,
max_iter=1000, multi_class='auto',
n_jobs=None, penalty='l2', random_state=123,
solver='lbfgs', tol=0.0001, verbose=0,
warm_start=False))],
verbose=False)TransformerWrapper(exclude=None,
include=['01', '032010', '0415', '0430', '05', '0530',
'0640hr', '09', '10', '100', '10000', '10010',
'100bt', '100mi', '100mph', '100th', '101130',
'1017', '1022', '1030', '1030pm', '104', '10411',
'1045', '106', '106npark', '10am', '10hour',
'10hrs', '10pm', ...],
transformer=SimpleImputer(add_indicator=False, copy=True,
fill_value=None,
keep_empty_features=False,
missing_values=nan,
strategy='mean'))SimpleImputer()
SimpleImputer()
TransformerWrapper(exclude=None, include=[],
transformer=SimpleImputer(add_indicator=False, copy=True,
fill_value=None,
keep_empty_features=False,
missing_values=nan,
strategy='most_frequent'))SimpleImputer(strategy='most_frequent')
SimpleImputer(strategy='most_frequent')
TransformerWrapper(exclude=None, include=None,
transformer=CleanColumnNames(match='[\\]\\[\\,\\{\\}\\"\\:]+'))CleanColumnNames()
CleanColumnNames()
LogisticRegression(C=0.472, class_weight='balanced', max_iter=1000,
random_state=123)import os
from datetime import datetime
from pycaret.classification import save_model

# Current working directory, kept with a trailing separator for
# backward compatibility with the rest of the notebook
PATH = os.getcwd() + os.sep

# Ensure the 'models' sub-directory exists. exist_ok=True is atomic and
# race-free, unlike the original `if not os.path.exists(...): os.makedirs(...)`
models_dir = os.path.join(PATH, "models")
os.makedirs(models_dir, exist_ok=True)

# Timestamp suffix so successive saves do not overwrite each other
current_time = datetime.now().strftime("%m-%d-%Y_%H-%M")

# Persist the full pycaret transformation pipeline + model under 'models/'
save_model(tuned_model, os.path.join(models_dir, "best_model_CountVectorizer_" + current_time))

# To reload the saved model later:
# loaded_bestmodel = load_model(os.path.join(models_dir, 'best_model_CountVectorizer_' + current_time))
Transformation Pipeline and Model Successfully Saved
(Pipeline(memory=Memory(location=None),
steps=[('numerical_imputer',
TransformerWrapper(exclude=None,
include=['0430', '09', '10', '10 hour',
'10 min', '100', '100 follower',
'106', '106 flight', '10pm', '11',
'1130', '11pm', '12', '12 crew',
'12 hour', '12 week', '125',
'12hour', '12hour flight', '13',
'13 hour', '130', '14', '15',
'15 min', '15 minute', '17',
'17th', '18', ...],
transformer=...
TransformerWrapper(exclude=None, include=None,
transformer=CleanColumnNames(match='[\\]\\[\\,\\{\\}\\"\\:]+'))),
('trained_model',
LogisticRegression(C=0.472, class_weight='balanced',
dual=False, fit_intercept=True,
intercept_scaling=1, l1_ratio=None,
max_iter=1000, multi_class='auto',
n_jobs=None, penalty='l2', random_state=123,
solver='lbfgs', tol=0.0001, verbose=0,
warm_start=False))],
verbose=False),
'C:\\Users\\cecil\\OneDrive\\Documents\\AI_Engineer\\7-Realisez_une_analyse_de_sentiments\\P7_WorkingDirectory\\models\\best_model_CountVectorizer_12-06-2024_14-35.pkl')
# Generate predictions on the test set (never-seen data)
predictions = predict_model(tuned_model, data=test_count)
| Model | Accuracy | AUC | Recall | Prec. | F1 | Kappa | MCC | |
|---|---|---|---|---|---|---|---|---|
| 0 | Logistic Regression | 0.7143 | 0.7886 | 0.7259 | 0.7101 | 0.7179 | 0.4285 | 0.4287 |
df = predictions  # alias consumed by visualize_nca_predictions below
Représentation 2D¶
! pip install seaborn
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.neighbors import NeighborhoodComponentsAnalysis


def visualize_nca_predictions(df, train_df):
    """
    Visualise model predictions in 2D using Neighborhood Components Analysis (NCA).

    Parameters
    ----------
    df : DataFrame holding the features, the true labels ('target') and the
        model outputs ('prediction_label', 'prediction_score').
    train_df : DataFrame holding the original texts (column 'text') used to
        display the false positives and false negatives.
        NOTE(review): the example call passes train_df while df contains
        test-set predictions — confirm both frames share the same index.
    """
    # Split features from labels
    features = df.drop(columns=['target', 'prediction_label', 'prediction_score'])
    true_labels = df['target']
    predicted_labels = df['prediction_label']

    # Project the features down to 2 dimensions with supervised NCA
    nca = NeighborhoodComponentsAnalysis(n_components=2, random_state=42)
    nca_transformed = nca.fit_transform(features, true_labels)

    # Build a plotting DataFrame. Assign positional values (.to_numpy()) so
    # the assignment cannot misalign against nca_df's fresh RangeIndex —
    # label-based assignment would silently produce NaNs whenever df carries
    # a non-default index.
    nca_df = pd.DataFrame(nca_transformed, columns=['NCA1', 'NCA2'])
    nca_df['True Labels'] = true_labels.to_numpy()
    nca_df['Predicted Labels'] = predicted_labels.to_numpy()

    # Flag misclassified points, then split them into FP / FN
    nca_df['Misclassified'] = nca_df['True Labels'] != nca_df['Predicted Labels']
    nca_df['False Positive'] = (nca_df['True Labels'] == 0) & (nca_df['Predicted Labels'] == 1)
    nca_df['False Negative'] = (nca_df['True Labels'] == 1) & (nca_df['Predicted Labels'] == 0)

    # Map the positional masks back to df's original index labels before the
    # text lookup. (The original code indexed train_df with nca_df's 0..n-1
    # RangeIndex, which fetches the wrong rows whenever df has a
    # non-default index.)
    fp_labels = df.index[nca_df['False Positive'].to_numpy()]
    fn_labels = df.index[nca_df['False Negative'].to_numpy()]
    false_positive_texts = train_df.loc[fp_labels, 'text']
    false_negative_texts = train_df.loc[fn_labels, 'text']

    # Figure with two side-by-side projections
    plt.figure(figsize=(10, 5))

    # Plot 1: NCA projection coloured by true labels
    plt.subplot(1, 2, 1)
    sns.scatterplot(x='NCA1', y='NCA2', hue='True Labels', data=nca_df,
                    palette={0: 'green', 1: 'orange'}, s=100)
    plt.title('NCA Projection with True Labels')

    # Plot 2: predicted labels with misclassified points highlighted
    plt.subplot(1, 2, 2)
    # Correctly classified points (circles)
    sns.scatterplot(x='NCA1', y='NCA2', hue='Predicted Labels',
                    data=nca_df[~nca_df['Misclassified']],
                    palette={0: 'green', 1: 'orange'}, s=100, marker='o')
    # Misclassified points (red crosses = false positives, black = false negatives)
    sns.scatterplot(x='NCA1', y='NCA2', data=nca_df[nca_df['False Positive']],
                    color='red', s=60, marker='x', label='Misclassified 0',
                    edgecolor='red', linewidth=2)
    sns.scatterplot(x='NCA1', y='NCA2', data=nca_df[nca_df['False Negative']],
                    color='black', s=60, marker='x', label='Misclassified 1',
                    edgecolor='black', linewidth=2)
    plt.title('NCA Projection with Predicted Labels and Misclassified Points')

    # Manual legend placement
    plt.legend(title='Classification', loc='upper right')

    # Render both plots
    plt.tight_layout()
    plt.show()

    # Print the misclassified texts, grouped by error type
    print("\nFaux Négatifs (Prédit comme 0, mais vrai label 1) :")
    for text in false_negative_texts:
        print(f"- {text}")
    print("\nFaux Positifs (Prédit comme 1, mais vrai label 0) :")
    for text in false_positive_texts:
        print(f"- {text}")


# Example usage
visualize_nca_predictions(df, train_df)
Collecting seaborn Using cached seaborn-0.13.2-py3-none-any.whl.metadata (5.4 kB) Requirement already satisfied: numpy!=1.24.0,>=1.20 in c:\users\cecil\appdata\roaming\python\python39\site-packages (from seaborn) (1.26.4) Requirement already satisfied: pandas>=1.2 in c:\users\cecil\appdata\roaming\python\python39\site-packages (from seaborn) (2.1.4) Requirement already satisfied: matplotlib!=3.6.1,>=3.4 in c:\users\cecil\appdata\roaming\python\python39\site-packages (from seaborn) (3.7.5) Requirement already satisfied: contourpy>=1.0.1 in c:\users\cecil\appdata\roaming\python\python39\site-packages (from matplotlib!=3.6.1,>=3.4->seaborn) (1.3.0) Requirement already satisfied: cycler>=0.10 in c:\users\cecil\appdata\roaming\python\python39\site-packages (from matplotlib!=3.6.1,>=3.4->seaborn) (0.12.1) Requirement already satisfied: fonttools>=4.22.0 in c:\users\cecil\appdata\roaming\python\python39\site-packages (from matplotlib!=3.6.1,>=3.4->seaborn) (4.55.2) Requirement already satisfied: kiwisolver>=1.0.1 in c:\users\cecil\appdata\roaming\python\python39\site-packages (from matplotlib!=3.6.1,>=3.4->seaborn) (1.4.7) Requirement already satisfied: packaging>=20.0 in c:\programdata\anaconda3\envs\automl-env\lib\site-packages (from matplotlib!=3.6.1,>=3.4->seaborn) (24.1) Requirement already satisfied: pillow>=6.2.0 in c:\users\cecil\appdata\roaming\python\python39\site-packages (from matplotlib!=3.6.1,>=3.4->seaborn) (11.0.0) Requirement already satisfied: pyparsing>=2.3.1 in c:\users\cecil\appdata\roaming\python\python39\site-packages (from matplotlib!=3.6.1,>=3.4->seaborn) (3.2.0) Requirement already satisfied: python-dateutil>=2.7 in c:\programdata\anaconda3\envs\automl-env\lib\site-packages (from matplotlib!=3.6.1,>=3.4->seaborn) (2.9.0.post0) Requirement already satisfied: importlib-resources>=3.2.0 in c:\users\cecil\appdata\roaming\python\python39\site-packages (from matplotlib!=3.6.1,>=3.4->seaborn) (6.4.5) Requirement already satisfied: pytz>=2020.1 in 
c:\users\cecil\appdata\roaming\python\python39\site-packages (from pandas>=1.2->seaborn) (2024.2) Requirement already satisfied: tzdata>=2022.1 in c:\users\cecil\appdata\roaming\python\python39\site-packages (from pandas>=1.2->seaborn) (2024.2) Requirement already satisfied: zipp>=3.1.0 in c:\programdata\anaconda3\envs\automl-env\lib\site-packages (from importlib-resources>=3.2.0->matplotlib!=3.6.1,>=3.4->seaborn) (3.21.0) Requirement already satisfied: six>=1.5 in c:\programdata\anaconda3\envs\automl-env\lib\site-packages (from python-dateutil>=2.7->matplotlib!=3.6.1,>=3.4->seaborn) (1.16.0) Using cached seaborn-0.13.2-py3-none-any.whl (294 kB) Installing collected packages: seaborn Successfully installed seaborn-0.13.2
Faux Négatifs (Prédit comme 0, mais vrai label 1) : - @emmacandlish re-reading new moon. decided im taking eclips on the plane, so i wanna read new moon before it . LOL - The airport keeps taking people away from me. - Sitting in the airport, just got off the phone with Verizon to have them fix my phone! Will be home soon!!! - Just landed in Florida!! Now another flight to puerto rico! But it's two hours long this time - Did anyone have issues with their 360 ghostbusters flight suit code not working? #xbox #ghostbusters - feels bad for robert being attcked by paps in the airport. also, feels bad for watching these videos. - Was listening to @Eminem and @taylorswift13 on the plane - I'm sad @kacisloss is leaving today. I should have told her it wasn't possible to change her flight. - @BerlyAnne *grins* I am glad to see a smile from you this morning! Packed-n-ready to fly yet?? No rush of course, 6 a.m. flight.. - Thanks to the crew of EclecticRadio and Havana for the great night! Only bad thing is the tiredness at this moment - Paid $362 for a brake job today. That sinks any hope upgrading flight or hotel room on @soefanfaire trip. Oh well - @aliyaki Weirdly I had that on the plane on Sunday night going to Melbourne, never had it on a plane before. It worried me *hugs* - @ElectriKateD aww, don't worry, I think that his flight should be there very soon - Small plane + Thunderstorms = No fun! But I'm in KC! Just called the cab - @burbankhays open the gate to your apartment complex? I'm just kidding. swing swing swing etc etc. love it! Love you! okay, that was gay - @honeybeetoys OMG 1000 followers! Have just added/edited your links on AB-sorry 4 delay - Going to DFW airport - @philhart @JoHart what I need is a really cheap flight next w'end-will have to have a look - @EmilyAllTimeLow no didnt get to but i found out that they spent the whole of monday in sydney airport and missed them by 2 minutes! - @dannygokey awww danny im sooo sorry!! 
i feel so bad that really sucks..please do me a favor dont go on that airline again..welcome home! - mother nature, please do not rain and delay @therunners flight. we are cutting it too close to game time and i dont want to miss the game - I will not cry at my last HILLS crew meal! (Yes I will ) - Wishing I could be at jetset with the space crew. - @spencershell When does your flight leave Thursday? If it's around noon, I'll probably see you at the airport. - feels so damn guilty. My uncle is picking me up in Newark. He shouldn't have to deal with this yucky delay - @kgutteridge this time next week I will be 30 minutes away from SF airport if flight from UK left on time - Dad-Ur dawter is grounded til she's 71! Wife argues but has 2 concede. Dawter (holding flowers) Happy Mother's Day, Mom! #crapsoaplines - worst part of the trip---wasting 2.5 hours going to/being at the airport - Dropping kakevin to ngurahrai airport. He's goin to SG, and I'm still here. Aaah I envy him for this - I dream on becoming a flight attendant. - @colettebett I have 4 more shows this summer. 2 more next week and 2 in July. Sittin on the plane back to Texas now. - @jordanknight Good Morning. I missed your Tink I was downloading the Uber Twitter upgrade. It's great! - @jellyybeannn :o Idk y, but I luv the airport lol. Lol, i'm old - @kaitibug For a wedding! In fact, currently on Virgin America. Crazy airline! Weeee wifi in the air! - not excited about getting up in 4 hours. Or the prospect of being at the airport at 4:30 am on Friday. - Sitting in the lounge of my hotel with the Dutch Pokernews guys. Flight at 5, not back home til midnight tonight. - Why do I have to work during the entire LSU game? Thought I'd get to watch the beginning at least but a rain delay has prevented that - @harbars I was going to say have a good flight but perhaps - "Have as good a journey as having to cope with other people will allow". - Sorry for the delay... 
I'm back home This #sotd fits my mood today for some reason. â« http://blip.fm/~7r2zh - extremely organised airport though, everything is easy to find, everyone is calm, its very nice... couldn't find any special cakes though - CNN: Brazil confirms plane crash (duh) prays go out to the family...horrible tragedy - In exactly 24 hours I will be seeing the girl at Kansas City International Airport... I'm very excited!! - @Darine Have a safe flight dear and ENJOY - Air France plane from Rio to Paris missing. Crashed ? Maybe just me, but I'm nervous... Hope they're ok http://bit.ly/15Y5iK - Early morning airport blues - http://twitpic.com/6qnxy - On the plane to Buffalo to visit GG! - @FrankAdman i saw you several times, but you too popular. i'm actually on my way to the airport. #140conf - trying to find the cheap (coach) airline tickets @ Tyrese4ReaL was talking bout, not cheap to the cheap $500+ - About to board plane to SG.. thank goodness for lcct wifi! Gonna miss malaysia - About to board my flight to kona! - Hey @hawkcam ive been away for a couple hours - sunny did you confirm flight? if so woohoo (hawkcam live > http://ustre.am/2f9i) - grrr... Jetstar's website and phone service doesn't reflect today's flight changes! It's called the electronic age, people! - @billyraycyrus you amaze me I love how kind you are to everyone and you seem totally grounded - is on her way to JFK airport..next stop GREECE! Be back the 23rd! - Have to clean install 3.0 on my iPhone. Upgrade left too much cydia crap around - @eachnotesecure Yeah, I agree, but we can't afford to upgrade the car radio just for the 15 minutes we're in it every day. - At the airport waiting to board my flight gunna miss my Pila! - coming home tomarrow, not looking forward to my flight at 5 in the morning - iTunes now mess up my play count for podcasts since the 8.2 upgrade - @Nigelclarketv aww cool. Transformers was crazy. We performed til 1. Partied til 5. & just caught our plane home!! 
Haha good times - is never thought i would be grossed out by lindsay lohan topless- Got an A in OB so out celebrating with the crew - Got an email from @Unitedairlines telling me I could have got my monday flight to SFO cheaper & Biz class if I had booked it tomorrow - Off to the airport to fly a plane solo again first have to drop daughter off at her new school... - chilling in Cabo. Weather's beautiful, plane is idling - @ the airport saying farewell 2 some grobie friends - Loving Lexington. At airport getting ready to leave and already looking forward to the return. - Said goodbye to Ash at the airport today have a safe trip bud! - 'OMG Damo quick, it's Concorde!' (turns out no, not Concorde but Vulcan) smaller apparently, not much of a plane expert! - Good morning everyone! Okay I pinched my finger this morning with my seatbelt, now i have a little blood bubble on finger! Ouch! - Tuff luck o well gonna make my way to LGA now! Don't want to miss my flight!!!! - Getting on a plane headed for some days of shopping in the windy city with mi madre - @ktsummer where u goin??? I wanna go on a plane somewhere - leaving to go shopping ! my last day in thailand ! leaving TODAY ! actually tomorrow about 1 am, but i have to be at the airport today ! - The first thing I smell walking off the plane in Nashville - Barbeque! - In long beach our flight doesn't take off until 4:50 but I'm almost home, Mexico was AMAZING But damn I'm bored I'm the airport - just said goodbye to Trev I suppose to be on plane next to him...life is just unfair sometimes - @chrisjsimon Naks! Japorms na japorms... Now get back to your seat ang fasten your seatbelt! Pasaway! - For anyone waiting for results of Telegraph #photog comp, so sorry for delay I'm having Internet issues. It's broken will post asap - Ah man. I cant wait! Just about to board a flight to Florida - going to airport... will be headed to DFW then DIA... goodbye Texas Tech! - Finally free from my flight. Cool temps in LA! 
- I go Sleep! Tomorro at the airport to see(trying --') McFly. - @jancornelis wish I could bring an assistant ! Too bad airline fees aren't very cheap - @TomFelton Safe flight home to you and Jade XX - has had a magnificent time in Galicia. Wine, grappa, liqueur now jammed into suitcase. Off to the airport! - ooo cool, my coach seat has a usb charger socket & 100bt, yay Singapore air! Too bad my cables are in checked luggage - Deja is @ the Alanta airport..she leaves in 2 hrs for london I miss her already, Atleast shes still in The US for now! - On the ground in Houston on my way to Portland for an MCN board meeting... Woke up at 3:30am for the flight - @Danatkinson Did you manage to sort out another flight? You crack me up - booking my flight back home soon - Watching The Soup and getting ready to take wifey to the airport. - @BabylonXCory .....words can't describe how much i miss you!! take a flight!! - my ipod touch died after upgrade, now cannot bring to PNG - Plane diverted back last night to Chicago due to med. emergency. Plane due out a hear at 4:45 pm. - i feel like pasta haha. sooooper dooooper bored.nothing to do grounded! ugggh. i have the rocklobster song stuck in my head.bahahaha - Whoa whatta landing- hello sunny san diego - @Zeeenia i was thinkng more on the lines of stayng at the airport and then followng them to their hotel, then findng out their room no. - The storm is here. Rain delay for the game. Glad I am not at the game tonight. Fans being asked to clear stands because of lightening. - sorry for the delay @jmanstudios - when @JohnMilleker and I get things posted - we'll share a link! - Getting ready for the early flight... joy. - Booking my flight back to Atlanta!! - do you have problems with !kde 4.2.3 and Radeons? after upgrade my X consumes over 50% in state of idle - Rain Delay for the Rockies.... - @jane__ I'm awesome! my parents grounded me from da computer during the week but not on weekends! - Landed safely in Florida. 
@grantdaws didn't fuss at all. Everyone on the plane was impressed with how good he was. Me = proud papa. - @BlissLauderdale Ughh Ive heard!! Unfortunately my flight leaves Thursday night!! - reserved a plane ticket! The move date is SET for July 27th! #fb - just got on the plane. yay! first class fun. haha! okay i'm turnig off the phone already. bye! next stop vegas. - I would be leaving on a jet plane ... if my flight hadn't been pushed back two hours. - @theBrandiCyrus, im selling my sidekick aswell & ima probably bid on yours 'cause i wanna upgrade - On board my flight...starting a new book, The Alchemist by Paulo Coelho...it was recommended to me by a friend. Tweet ya l8r! - Grounded I accidentally closed my sister's finger in the back door, so I'm grounded from going outside today. Oh Well. - @underoak did you upgrade your #iphone yet?! It's pretty sweet, a little anticlimactic since we still cant mms. Boo AT&T! - @Carrieisbarrie Hey yeah i'm ok thanks & yeah she did get away ok, i cried all the way home from the airport how r u sweetheart? x - @jahdog707 That tofu looks amazing. I need to catch a flight to that restaurant right now!! - @HeathE2003 i wish you could come! maybe you can hop on Zack's flight..we just won't have a fam vaca tee for you... haha - Empty plane! Luv it! I get the whole row to myself. But forgot my drink coupons at home again!!! - @Vanilla_B u BBM'd me? my bb has been showin its ass all day. smdh... time for an upgrade. - Going to the airport soon - @Geekvibes Yup I'll be there to and will go to airport from now on! - Waiting for my luggage so I can shower, change, get my Starbucks and then head out around Dublin!!!!! - i opened the gate for mom then it rained so hard and im all wet... it was fun... i wanna do it again... hahaha... the rain stopped... - @LizAnjos I'm staying at a hotel right at the airport (CDG) the first night... 
Then I'm staying at http://bit.ly/746qg - In Toronto - plane to Newark likely to be delayed by one hour Good job I decided to stay overnight in Newark tonight and drive tomorrow. - Somehow airport always reminds me of you... - last nite @questlove proved to me that there really is no such thing as too much stevie wonder. i woke up in my roots crew tank, smiling - @paulineANNtan you should've booked a flight! - @thesldude86 I never knew you spoke to the Aussie TODAY show Crew - Flight KM146 claiming bags already - just woke up, felt good to sleep in But i still gotta do my hw before my mom comes home or i'm dead/grounded =/ - : The US Open is in a rain delay... sad. - @FeliciaSlattery Happy, Happy Birthday to You!!...have lots of fun jumping out of the plane..hold on tight - You pay too much for food @ the airport AND it doesn't even taste good! - Nervous for the online check-in day I have to do it quickly. - In Val d'Or, waiting for my wife and youngest's flight to leave. a long drive to Chisasibi for me. - lol-ing as i watch the new season of Flight of the Conchords on SBS right now! @arjbarker u made a great rapper in the song 'sugar lumps' - @lrkane I like the film 'Flight of the navigator'. Does that help? - @Blancoei yeah that wud b crapy great! no ticket, no baggage, just lot of cash, toothbrush n' passport great, no? - running off to the airport, I'm gonna miss my ladies so much - @xxandip I've been up since 3 damn Glasgow airport!! - The weather is SO nice out and I'm at the airport booooo Had a great time in NY!! Gonna miss you all!!! And I miss him already ::cries:: - there were 3 slovaks on the plane that got lost http://is.gd/Mcel - @jimmuncie Pretty good I mean, I'm exploring this whole country music thing - definitely a calf looking at a new gate. - @faultlines Ah, finally. Sorry for the delay. Wala akong season 3 pero check ko. Baka nasa Manila yung files ko. - @lrwher couldn't change my flight! 
stuck in chicago - @DaySpringCards So glad to hear that! I'll be emailing you shortly. It's been a hectic last two weeks..I apologize for the delay - Up early. Walking thru Brooklyn with my bestie @missdenine getting coffee and donuts for the crew - @JCTurner Lucky you with the 3GS! I can't justify the upgrade expensive holiday looming. @stephenfry review in The Gardian was good? - @SouthwestAir Your rapping flight attendant should definitely MC the video- I hope i'm on one of his flights soon http://twurl.nl/23h5bo - Air France flight AF447 crashes with 228 passengers. Feeling ...May lord give all strength to their families and friends. - @TaniaUncensored tania, i think you should get athene and the rest of the crew on twitter - out the house heha :L duno why i dreamt that but yeah and i also dreamt that a flight tracker thing said that our flight had crashed :S - @idubbs I'm almost afraid to know. It makes the waiting for release/upgrade so much more painful - On my way to the airport to pick my brother up Using the mobile broadband to stay connected. Love this thing lol. #Jordy26 - MISSING AIR FRANCE PLANE: Debris, fire spotted in ocean http://bit.ly/3P41g6 (via @starwing) - Off to the airport for Maria's album launch - En route to Yankee stadium..meeting the rest of the crew there. Sorry I'm running late gang! - The director of the show said you can judge a runway by its breakfast - My friend works 4 Delta @ JFK airport, flys 4 free & missed her flight. Now she's on standby 4 another 1, pray she gets on! - I will never miss brakpan lol now a glorious 2 hour wait for my flight. Meh. - At london standsted airport right now. Not long now until I get home! - just dropped my baby off @ the airport - Got a free MTB yesterday that needs some work . . . or I can use the components to upgrade my other bike. - jesus poor people on Air France flight, I hate turbulence but they say planes can withstand that! 
I fly in 2 weeks - yay @Firemint Flight Control update came through on iTunes - reading about the plane in NJ ! how sad - @CoalEO Hi! A Kiwi? Flight of the Conchords fan by any chance? - Just returned from entire day of traveling to and from scenic Columbus, OH. 4 airplanes in 1 day. Airport food & no iPhone so no tweeting - I'm going to log off Twhirl for a while so that I can go make some caps of Enemy at the Gate and Whispers. See you all in a bit - PLEASE PRAY FOR THE FAMILIES AFFECTED BY FLIGHT 447. PLANE DEBRIS WAS FOUND IN THE MID-ATLANTIC. - this is the third time I'm going to be on a plane taking off today - has lost her driving licence = no ID It sucks being grounded from the pub for up to 10 working days! - going to the airport to pick Liam up - The Hangover: That movie is gonna be hilarious. wish i could see it with the crew - Phoneless and sad. the iPhone upgrade ate my SIM card! - @julespari was hotttt on the runway tonight!!! out celebrating - Heh "FBI: Terrorist Attack on Golden Gate Bridge May Have Been Green-Screened" http://tinyurl.com/p5kyst - @graywolf you should probably do one more USB stick and swallow it before you get on the plane - so you get through security - Looks like I don't qualify for iPhone 3GS upgrade pricing until 5/10/2010 - Rogers fail already! Customer central not showing iPhone upgrade for me - @esmeeworld have a good flight where u headed??! - @Abe_Frohman I should've gone into Sports Medicine or something...greater chance of landing a job with the Lakers! LOL! - On the magical express on our way to the airport - so i was attempting to upgrade wordpress to 2.8 and it totally hosed my web server - URGENT -- Air force officials say they have found bodies and debris from Air France flight 447 which crashed in the Atlantic. - @robinson1970 poor Grant is delayed as usual at the airport and NO JAY!!! - Boooooo! Still on the runway... An hour later I need to get home people! 
- No Iron Maiden Flight 666 DVD in the post today Will have to wait til Tuesday now as got it posted to work!!!! Plop!!! - Waiting at Orlando airport...an hour til we board and the kid is already a nightmare. Great, flashbacks from our flight last year - KLM Airline service is really good - had a very short flight from Riga to Amsterdam but they served even food. I was very suprised Cool! - Listening to "Cross the line" by Fiction Plane... Greatest song ever, better than sex, better than life... About to orgasm, nuff siad - delicious fusion mouthgasm in Sausalito after biking over the golden Gate bridge - @PamAtherton You are right, audio interfaces & microphones are always interesting to airport security - @Kenneth1968 Doh!!! That's more expensive than the flight!! - I can't get past 79 on flight control. - Flight is toast--cancelled! I'm now re-booked for tomorrow and heading *back* home. What a great way to spend the last 6 hours of my day! - my husband should be landing in Denver soon. I wish I could've gone. - @HamishGraham haha it's all planned out, my friend jeremy is picking me up from the airport and my bro is leaving his door unlocked - @simon Indeed. If you commit to 3 years can you upgrade during that time? Thanks for the respose Considering options for @getOnePage - @pinkhazebfly @JazzyLamby hahaha I Love you guys we the 8/9 crew hahaha PinkHaze was On IT tonite/mernin! @MariahCarey goodmerrnting/Nite - W8ing @ Dubai airport, HK is better... - The news is always so depressing I feel for thoes people on that flight. - Leaving in FOUR DAYS for a 12 hour flight - Off to bed; good night everyone. *although I feel terribly sick....atleast I'm going to bed happy because the BREW CREW won tonight! - Plane crashed in france - @dannygokey yep, you know that pic will be all over! Hope you have a safe trip home n don't hate NY 2 much after the luggage incident - Leaving to the airport in 30mins - i am sleeepyyy. today = busy. 
work 10-4, library, class 6-7:20, tanning, packing, bit of sleep, atlantic city airport, myrtle beach - @ashadihopper Go to the Irish Village, next to gate 123 and have a beer for me *sniff* - Flight of the Conchords was awesome! Had sweet seats 3rd row courtesy of Luke's friends. No pics though Might... - http://bkite.com/07ywb - Adelaide airport - everyone now has been waiting 45 mins for bags.... From my flight only, not happy - Waiting to head to airport? Not enough time to actually go out and do something but enough time to render me utterly bored - on my way to the airport - So proud of SCC! " Love SAA and my fav friday crew from kerck! You guys make my life . - @el_friendo I have done a couple here and there but damn school has taken a lot of time away from it! I've CS3. I should upgrade - Just had a good skydive and a crash landing in style - Traveling SFO-BOS, and United seems to have pulled my upgrade #yankeeclassic - @kinagrannis booo, that's not fun!... have a good flight - Misses the DB crew - In plane on my way to texas - Goodnight to all gotta be at the airport way 2 damn early tomorrow - Back home now and getting into tiding up my flat. My luggage was never found. Ah well. Faux Positifs (Prédit comme 1, mais vrai label 0) : - @chewy96216 The Crew, some brew and Eldora racing. All rainouts aren't bad - Stuck @ the gate friggin' tired as all get out.... - Just dropped Josh off at the airport - Finally done with the story boarding and IA of my online game. Only thing left to do is code it... but I can't decide over Jaxer or PHP. - Almost done packing, I have to be at the airport at 12:30. Then DALLAS! I bought the new Jonas Brothers CD maybe I'll see them down there - @tommcfly Hey tom put me in plane with u and the guys i want back to brazil for see my family - @galaxydazzle I want to! if I book today I have a flight for 10 euros... but i need a creditcard for it! and somebody to go with - @eesti93 I was! 
We were delayed sitting on the runway for an hour because of the wind and rain. - Is going WI but will be back in AZ on Monday! Yay! Going to watch Friday the 13th on my iPod on the flight. - upset stomachhh GROUNDED! &jealous, of some hoooee. :/ iiii wwaanntt hhiimmmm,</3 - Alarm didnt go off. Missed my flight this morning waiting for three hrs in airport for next one - @louiseodquier yep trobs safe flight coming homeeee. cant wait for u to see the new baby - @agent242 just got it...guess there was a bit of a delay with the direct message. thanks muchly. - @JeepersMedia can u buy the flight for me?" cuz i cant go to NYC israel is too far - .....LOVE free wi-fi at the FLL airport!!!! - @jayncoke ooh, On a Plane would be awesome! Can you please send that to Andy Samberg? - At the airport problem with our tickets - On the plane now. I do quite like Easyjet. They get a bad ride cuz they are budget, but I find them to be pretty proffessional - Listening to " hey you" by pink floyd.. Ha! Takes me back to boarding school. Oh the pain! - Just landed in Tucson. Didn't really sleep on either flight. anyone know of a good place for lunch? - O.K TWITTERLAND IM OFF 4 THE NIGHT/ EVERY OTHER WEEK IM OFF 4 A FLIGHT/ I CAN SEE MY FUTURE/ IT'S RIGHT ABOVE BRIGHT/ RIGHT ABOVE A KITE - @_CrC_ sounds awesome to me. when we flying? do you serve food on the flight? let me guess, tacos - Good LATE nite/EARLY mornin Twit Fam...on a plane back to So. Cali in 10hrs.. :/ LOL - ZOMG i was just rubbergloved at the airport on suspicion of swine flu - Big start up party yesterday! Hope it wonât affect the performance of the radios crew today ! www.radiowroclove.com - Yay, 2 new flight control maps coming in the next update A beachside runway and aircraft carrier - @jtimberlake Next time you come skiing/boarding here in Utah, you should give me a tweet! 
There are some cool spots only the locals know - Kayak.com hasn't been finding me the best deals lately Just found better deals on Virgin and JFK for a flight on their own site. Hrm. - My feet still hurts. But I seriously miss walking the runway. I wanna do it again! :| ) Still have curly hair. - Taking my baby to the airport. - is thinking how sad it was about the pilot from Continental Airlines. Glad the rest of the flight is safe... - In B-town. Flight delayed 4 4hrs Did dry run of my pitch to the Westcon Group tomorrow. They have a confidence monitor. Whew! #TheVibe - Ok... finally got some form of internet set up! Sitting at the gate. Checked Google Maps. Hollywood's 30 mins away only? Tempted - Thx to everyone for reassuring me about my flight tmrw. As travel planner, I know it's safe. As gf/daughter/sibling, i'm a bit nervous - Can't upgrade iPhone to OS v3, something about "server not available" - misssesss all the crew - got wasted, mom came and picked me up, now grounded, woo, what a great day - is tryna find her a 30 yr old junt with a 401 k, stable job, good home with an extra room 4 me and my baggage..so tired of u young dudes - My sunburn delay me from working out.... I'm still tender... - so sad to go back back to philly philly in the am...until next time dnc crew... edi teddy... and such! - @edbrill 125 miles of a free ticket will get the engines rev'd and the brakes released on the runway. But that's about it. - I feel so sorry to the friends and families of the ppl who were killed in the Air France flight. - Just checked in. Coming back Reached Gauwhati airport at 11.30. Was closed! There wasn't a single person there. Not even a guard!! - At the Miami airport checking out magazines...about to fly to Venezuela to visit some fam...I'll miss my tech but I'll love the food - so, i just found out that i've got enough frequent flyer miles to get a FREE plane ticket to nyc! life is good - Trying to upgrade to 3.0 and getting no where. 
- dammit, forgot something at home... cant sleep and my flight's in a few hours - Awake, about to reheel boots, get essay, buy shampoo. Parents' flight ended up being cancelled, so they are not in Prague - Getting ready for drive to airport - @Jonasbrothers @ddlovato what day r u landing on Spain? My bff and I got a surprise 4 u! You'll love it! Please answer!! - @mechangel Safe flight sweetieeeeee!!! So sorry I didn't get to see you before you left - I think my flight is delayed its not here yet - haz her gown ordered today. and dropped boogie off to the airport. and now.. laundry galore is to be continued. - Why did I book a flight for 6:50am tomorrow? - ZOMG I just figured out I can finally upgrade my phone to the iPhone :] I'm excited now Anyone else use the term ZOMG? Reply with ZOMG - @RiskybusinessMB i wish i was in dallas. My bffs are landing there soon for a layover to arizona - @sealdi airline booking? like @noreen's problem with Cebu Pacific... - first day of classes but i just fixed things. had dinner with orgmates. now alone in my boarding house. i have internet here already! - Flight b4 us got canceled hoping ours at 3:30 won't Otherwise we have 2 drive 2 Dallas 2 get on R connection. http://twitpic.com/3j4p8 - @dannygokey Hope your luggage follows you this time! - at the airport. Have a nice trip fikri..All the best to you - well bye guys if i can i'll call yu at airport or on way to there hmmm longest three weeks of me lifee ily. - @dutchiegurl aww ur welcome. glad u had a nice night! hope ur having a safe flight. will keep in touch with u over fb/twitter - @joxlan Yeah you lucky thing! I have to pay quite a lot to end my current contract early for the upgrade it seems - I've boarded my plane, and they just said to turn phones off so... Bye - Looking into freeware flight sims . Any suggestions? 
- Aw im taking my cousin to the airport 2maro morning..she's gonna be gone for 7 weeks its gonna be sad w/ out her - @frankmartin you know, my tweetdeck "broke" after upgrade - lunch done and connection found in Madrid Oracle CVC so sorting out mail etc before heading to airport for trip back to blighty - @6stringhero Have a great flight and tell her I said hello from S FLA. And please rock and roll for me I love it!! - Hmm, @NicoletteTay should be on the plane now on her way to Melbourne! I'm going to miss her loads! - Now that their is a more local airport its nice to see air shows in my hometown - Off to the airport. :] Cyprus, HERE I COME! - Sitting Ready Reserve at ATL airport...hoping to fly out but kinda wanting to stay home today - Flight delays suck - BTW, I had a diet pepsi, not beer!! Guess what? Our flight has been delayed!!! - @NKOTB ahhhh I'll be leaving on my jet plane in about 9 hours...it can't come soon enough!! see u guys tonight - @Kutski Quite pissed off i have to wait for the my upgrade to get the new handset though - Our plane had landed!! FINALLY gonna get to board...an hour late. stupid rain!!! - Is watching tinker bell nd still grounded - It would be cool if there could be an upgrade. Like more bosses and stuff - Air France confirms plane crash... 228 feared dead - Flippin heck, the upgrade price to Windows 7 Ultimate is stupidly high. Not exactly encouraging people to upgrade, Microsoft. - Now I am leaving at 10:22 PM on flight United Airline 44. 14 hours lay over in the LA airport. How Nice. - Touched down back at heathrow. Now for the layover. - Crap. There's a hipster judge on the plane and I'm not being cool - loves that it is light out at 5 am. last bkfst before summer with science crew. then off for more Nemo. I'm gonna sing more today. - I want to go to Disney World and meet Mickey! But no by plane, it scares me! - Wow...this is a small airport...I'm spoiled by JFK. @junkprints You feel me! It's hard to find good help these days! 
- @ShawneyJ Aww, sorry you're bored hon! Play Airport Tag? Which airport are you stuck in...some are better than others. #ballashoughband - Praying for all the family and friends of the people from the flight outta Brazil to Frace. It's a very sad day with this tragic loss - @DavidArchie Hi Archie! All of your fans in the US are waiting for you there! Have a safe flight back to US! - @omfericit Yeah Can't wait to get on the plane. I'm so tired...Black Sea on Sunday, yes! - @theflyingpinto thank you so much, i'm going to need some friendly faces in the crew room i'll let you know when i get my transfer - Jakarta Globe apps not working at all after 3.0 upgrade and app update - @Jajawilk ya? well jump on a plane and go! i just cant w8 for the weather. ill be lonely til august! boo - On our way to the airport... then to New York. - at the airport on way home. - @jeffpulver have a nice flight - Boarding. Bye bye Britain http://bit.ly/q23lz - Warm morning as per usual. Have to get on a plane this afternoon. - Landed During the flight, the kid next to me pulled out a loaf of bread and made a sandwich. Not gonna lie, it was pretty awesome. - @dannygokey and if someones does see you on the plane drooling, don't worry, they will take a picture/video and post it online! - New luggage... new clothes... new haircut... I'm on a roll! But now I'm tried also - looking at all my old myspace status' oh mann. Skyrockets in flight! afternoon delight! AAAAAAAAfternoon delight! - @jonnyrockunit pretty epic crew as well - @chrisfinlay Etchells are still landlocked but dinghies are sailing. Looking for a crew for Laser II but too cold to capsize right now - @TheMandyMoore Hee! I'm about to board a plane to Vegas myself! Hope you have a good time! - @masukair_king quest crew rocks my sock! and jabba... and kaba! - @4phun yes but it is the first time a plane crashes in this route - i'm flying allegiant to my dad's this year..i'm SO not excited. read about it online. 
i don't want a noisy, dirty, late plane i'm scared. - Bedtime. Early flight tomorrow. - you so much flight was ok thanks. thank you so much for last night. hope all's ok and i'll ring you soon. i love you and miss you xxxx - @DavidArchie Hi David! good to know your safe from the flight. we'll miss you here in the Philippines. - Everyone pray for the people on flight 447 and their families. What an awful situation!! - headed to the airport for alice springs then to bush camp #DSAA09 - @jessnextdoor yea... sad noh... when I heard about the missing air france I prayed na sana the plane just landed somewhere safe... - @abc7 at the Los Angeles Chamber Orchestra concert at Royce Hall. Little delay, but no damage. - @TexasGirlSari back and better then ever was able 2 save all my old stuff and upgrade x10 wat out u partna - @BeckyW_ "Americas Best Dance Crew" - Nice to know I'll have to check them out on YouTube. - G'morn Monday...The camping princess crew survived!!! Thanks for the memories and NO I won't camp again!! - Finishing up OMA mtg before heading home. #Aer lingus gave me inside seat not aisle. So have to dash to airport for reseating lottery - There's nothin like sweating your ass off on a plane during the summer while sittin at the gate.. The devil is beside me with ice cream - @MrPointyHead jealousy is the first sign of one's success! hope you get a flight soon to this beautiful Europe - @chrisbrogan The back of the plane is where all the whafts of stinkiness collect. - @bretharrison You know I meant Grounded, not Grounder. - Flight Simulators are not my friend. I'd be awesome if it wasn't for all the crashing. - is in the airport in Philly annoyed...my flight is delayed - @christiancuervo hahah ily, how was the flight? Send me a pic of how they make PS3's - is taking Alix Eve to LGA for her flight to CMH for her overnight visit to Kenyon College. It's supposed to rain. - Grounded.. 
SUCKS - congrats to @samsameni for landing Casey Thompson!!....she's almost as good as our chef - the moth on the window lost its battle to the wind bye bye moth...hope u have a safe landing. - We're ALL so blessed 2 B here. 05 plane to is VERY NEW, shows that anything can happen. God is still God, it was time. - took mom and littles to the airport and is missing them already stupid rain isn't helping lift the spirits much either!! - stressin...flight in 8 hours - #robotpickuplines----> You upgrade me. & You make my synchronous gyroscopic modulaters go pitter pat! <--------Yeah? Yeah! - picking up someone @ the airport - I'm up too dang early!!!!!! To catch this flight! The things you do when you love what you do!!!!! - At the airport happily awaiting my flight back to Houston and my sweet @stephenhadley. So great to come home to the love of my life - @DonnieWahlberg if i had a plane now id get there for sure - Upgraded client's website from drupal 5 to drupal 6. You need to upgrade one project of yours... the second upgrade will be like 1, 2, 3 - Headed to church in Beijing. :-] Then l o n g flight - Heading to Bill's for breakfast with the Realmac crew - Hi @YDURNAS very sad with news of flight 447 that crashed on the Atlantic from Rio to Paris - Current 3G owners CAN upgrade to a 3G S for an early upgrade price, just add $100 to everyone elses price - @SLessard Hanging out at the gate...come say hi? - @LogiBeer Oh yea, thats a great idea and then just concentrate Sunday with the editing and only the editing crew, excellent! - flight is as much my fault as anyones, i should have paid attention to the date not just time time. i was so excited i failed to validate - @EstJesusNoWhere there were about 27germans on the plane.. but no matter what nationality. they are human. but i guess theres no hope - in 24h i'm sitting on a plane, flying somewhere over germany on my way to the other side of the world. sounds like a plan to me. 
- I am waiting and literally running a countdown for Iphone 3.0 OS..to arrive for upgrade .. - Landed and waiting at baggage. - Heading to the airport - Back down on the Quay watching Windows timers again!! BORING!!! Off to see Iron Maidens Flight 666 movie in Portsmouth tonight though - Off to see Dutch Family Robinson today. Horrible weather for flying on a little city hopper William's (age 1) first flight too :-/ gulp! - No seatback TV this flight good thing it is only about 50 minutes. @ Concourse A http://loopt.us/23Tucg.t - checking in online for my flight tomorrow easter is almost here!! - TWILIGHT CREW AT MTV AWARDS. WOW. LOVE KRIS AND ROB. <3 - @loisyoung92 im getting an upgrade on my phone - AirAsia flt BWN KL delay for 45min -- mmm hv to wait until 9:30pm - sitting in narita airport with a midi keyboard composing a soundtrack... ain't technology amazing!!! - @deannapappas aww what a bummer! well atleast u know u won't miss ur flight ;) so u gonna go see "vegas" - on the plane & in the last row but close 2 bathroom & I have tea + 1 L water! still on time! #C2EA #YAI - Air Can doesn't allow online check-in for Expedia tickets so, combined w train malfunction, am not sitting on aisle for 1st time in ages. - @dannygokey Ok that just stinks!! Where did your luggage travel to...or are they still trying to figure that out? Bummer. - needs to download an upgrade to my blackberry so it can run smoother however, that means I will hve 2 give up an app...ubertwitter - @trvsbrkr in airport,can't watch fight. I'm takin Faber too. Post result please. - Detroit Rock City. Back in Eastern Standard Time. Can't wait to see my airport picker upper. - J's laptop is in AppleCare and she's on mine this weekend. This would have been a perfect N97 moment. Damn US launch delay. - @mandee_k Flight leaves in 2 hours I'm going to miss Spain (and Joshy!) Will text from T.O. Take care this weekend Mands! 
talk soon xo - really wants to camp at download so she can watch flight 666 with her boy - Uh, looking at the experience present site. A flight with a MIG 29 would I do as well. Maybe when I win a lot of money - @stevesumpton yes I do agree! Flight was good, got to sleep most of the way! - @lyndalpn Have a nice flight - In the airport again heading to az to see my family...wishing I was still with mike tho - Have arrived in vegas! Our FA on @southwestair sang to us on takeoff! "Fly me to the moon..." it rocked - getting on the plane catch you when i get back to columbus - chilling at will apt. fab. view. close to the golden gate. watching american idol!!. cant believe ADAM!!!. didnt win!. - taking grandma to the airport - @Justin_Steele I had a 3G, with dev 3.0, bought a 3GS with 3.0, tried to upgrade my 3G from dev 3.0 to 3.0, now it's bricked. - going to the airport . going home super excited - @vibzfabz i'd like to but i cant afford the price.mayb i'll go to the airport and stop the team ah, idk. hope you get that ticket as ... - Man another vep performance let's get it. My heart goes out to my crew sorry about the family issues I'll cover your work today stay up - Compiling all my music for my new Ipod...since I lost my old one on my flight back to Miami - Sigh. Still 4 more stations till my girlfriend Or my girlfriend's plane at least. - @beckyhope okkie thats nice of you becks <3 well, my flight was just about 1 and a half an hour, but i had such a nice steward... - @OutnumberedMama This is Sue, sorry 4 the delay, things R good here. Have U joined the nation yet? http://www.seventhgeneration.com/ - landed at SeaTac. The baby was much better this leg of the flight, thank goodness. - The Air France missing flight does not look good. - Oh FUCK. the mini usb has stopped taking a charge reliably now. This phone may not last me til mid-july upgrade - @DitaVonTeese La Duree in the Paris airport???? Good to know - Watching CNN news - hope they find that plane! 
Not looking good though - That was the smoothest Tiger Airways flight I've ever had. Thanks - @billpalmer Yeah, not super-gee wiz. I hope you are right, but even an incremental upgrade would be fine with me - Didn't get a free upgrade on my first flight as an airtran elite miles member Maybe on the way home. Four hour flight here we come! - Boarded plane outta denver Will be in Vegas in 2 hours - Can't believe my wallet was stolen on flight - @takisoma if you were my kid you'd be so grounded! - Just purchased my plane tix to the UK... unemployment isn't so bad after all - Dropping @bethlattin off at the airport Then back to work tomorrow.. - Off across the oceans again...hoping for a smooth flight with no lightening please...RIP to those poor folks Tweet ya Thursday - @MistressB So am i let me tell you..not looking forward to 14 hr flight, but it will be worth it - @nutone boooo I'm not open for upgrade till Feb 2010! - Ha ha, 'Flight of the Concords' cracks me up - @pocklock aww I can't say I'd miss the picnic but I definitely miss being antisocial with you and the old crew - Monitor at Chicago aiport says my flight to Raliegh, NC is "on time" - pray that it stays that way! - @shaundiviney the male host on my plane thought you were cute lovey - He's leaving to the airport and will be here in approx 13 hours. yippie! yay! - SHIT, have to wake up at 6:30 tomorrow Stupid early flight - @svcotton Indeed they are not - be there shortly // Take a British Midlands flight maybe from Dublin to Grlasgow or Edinb. no worries. - Waiting to board the plane. Frustrated that Safari on my iphone not picking up the wifi providers home page My ASAS eee works fine - @jacqui_cooper The last flight in the Space Shuttle program, and I want to watch the launch. I've never seen one live. - Such a shame about the Air France Flight 447 - still at the airport - @madeleineannie got the plane tickets last week xo - On the plane about to take off back to minn. 
I am really not ready to come back yet - when its my birthday i already have to go to the airport at 05.30 how terrible! - got a flight change, still not looking forward to leaving - so many kids are going to be on this flight T minus 65 minutes - hopes Silah has a good flight. - Johnny Depp & crew racked up a $4400 bill @ a resturant & Depp left a $4,000 tip for the waitor do u think he did it on purpose or error? - Pray for those who were in the Air France flight - Just arrived in London from L.A, long + boring plane ride...not fun - @MusicIsHealthy right girl, and than the whole west coast lets book a flight we'll meet at the airport see ya - @tommcfly hey tom! hope you have a great flight! can't wait to see you in Chile!!! - Geocaching.com seems to have a huge delay delivering Pocket Queries - @UrBaN_eLySsE ihad always wanted her 2 do an album like that 1st disc. iLike Video Phone/Diva/Upgrade U Beyonce, but Halo Beyonce is - @Contra_tss Yeah, but they building a train & there's always some thangs wandering around that airport & the hotel is inside the airport - @fatboy951 Why didn't you fly out of Ontario? From Curious in Ontario...and beers are expensive in the airport. ~Michael~ - Sunny Sunday ahead. Will visit the airport to watch planes with DS. Just what a woman wants, but a mother does. - it's about damn time that I got rid of the extra *bullshit* baggage!! - @matigo Good idea. No hot drinks before landing - And theeeeeeres the turbulence - MAJOR work stresses. oh sheeet. just want to catch a plane out of the country cannot wait for wednesday week tho ;) - My baby's leaving on a jet plane. Dont know when i'll see her again... Oh ok i do. Next wednesday. Gonna miss her though - @GenderQ unless you have a private plane you are telling me about - Ok, I made it on the new plane I'm lame. I need to be more spontaneous.. If only I didn't have something important to do tomorrow.. Damn! 
- Is loving Adrians singing on Wasted Years on the Flight 666 soundtrack - @fludwatches yea wiilib beer garden was dope. gotta try astoria one too! i just sent homegirl off to airport my room looks mad empty
# Close the currently active MLflow run so later cells start a fresh run
# instead of logging into this one.
mlflow.end_run()
2024/12/06 13:11:56 INFO mlflow.tracking._tracking_service.client: 🏃 View run Pycaret (CountVectorizer) at: http://localhost:5000/#/experiments/374735653194037029/runs/f704392325a64046b6a09ad4514e26a5. 2024/12/06 13:11:56 INFO mlflow.tracking._tracking_service.client: 🧪 View experiment at: http://localhost:5000/#/experiments/374735653194037029.
import pandas as pd
import plotly.express as px
from sklearn.neighbors import NeighborhoodComponentsAnalysis

# Project the PyCaret prediction frame `df` (features + 'prediction_label' /
# 'prediction_score' columns) into 3D with NCA and visualise classification errors.

# Separate the features from the labels
features = df.drop(columns=['target', 'prediction_label', 'prediction_score'])
true_labels = df['target']
predicted_labels = df['prediction_label']

# Apply Neighborhood Components Analysis (NCA) to reduce to 3 dimensions.
# NCA is supervised: it uses the true labels to learn a discriminative projection.
nca = NeighborhoodComponentsAnalysis(n_components=3, random_state=42)
nca_transformed = nca.fit_transform(features, true_labels)

# Build a DataFrame for visualisation
nca_df = pd.DataFrame(nca_transformed, columns=['NCA1', 'NCA2', 'NCA3'])
nca_df['True Labels'] = true_labels
nca_df['Predicted Labels'] = predicted_labels

# Flag misclassified points
nca_df['Misclassified'] = nca_df['True Labels'] != nca_df['Predicted Labels']

# False positives / false negatives (assumes target encoding 0 = negative, 1 = positive)
nca_df['False Positive'] = (nca_df['True Labels'] == 0) & (nca_df['Predicted Labels'] == 1)
nca_df['False Negative'] = (nca_df['True Labels'] == 1) & (nca_df['Predicted Labels'] == 0)

# Recover the raw tweets behind each error type.
# NOTE(review): this assumes `df` shares its row index with `train_df` — confirm upstream.
false_positive_texts = train_df.loc[nca_df[nca_df['False Positive']].index, 'text']
false_negative_texts = train_df.loc[nca_df[nca_df['False Negative']].index, 'text']

# BUG FIX: plotly express treats a *numeric* color column as continuous, so the
# original `color_discrete_map={0: 'green', 1: 'orange'}` was silently ignored and
# a continuous colour scale was shown instead. Cast the label columns to strings
# (after the numeric comparisons above) so the discrete colour map applies.
nca_df['True Labels'] = nca_df['True Labels'].astype(str)
nca_df['Predicted Labels'] = nca_df['Predicted Labels'].astype(str)
color_map = {'0': 'green', '1': 'orange'}

# Plot sizing
marker_size = 4           # smaller markers for dense point clouds
figure_size = (800, 600)  # (width, height) in pixels

# Interactive 3D scatter coloured by the TRUE labels
fig1 = px.scatter_3d(nca_df, x='NCA1', y='NCA2', z='NCA3', color='True Labels',
                     symbol='Misclassified', title='NCA 3D Projection with True Labels',
                     labels={'True Labels': 'True Labels'},
                     color_discrete_map=color_map, opacity=0.7)
fig1.update_layout(scene=dict(aspectmode='cube'), width=figure_size[0], height=figure_size[1])
fig1.update_traces(marker=dict(size=marker_size))

# Interactive 3D scatter coloured by the PREDICTED labels
fig2 = px.scatter_3d(nca_df, x='NCA1', y='NCA2', z='NCA3', color='Predicted Labels',
                     symbol='Misclassified', title='NCA 3D Projection with Predicted Labels',
                     labels={'Predicted Labels': 'Predicted Labels'},
                     color_discrete_map=color_map, opacity=0.7)
fig2.update_layout(scene=dict(aspectmode='cube'), width=figure_size[0], height=figure_size[1])
fig2.update_traces(marker=dict(size=marker_size))

# Display the interactive figures
fig1.show()
fig2.show()
TF-IDF¶
from sklearn.feature_extraction.text import TfidfVectorizer

# TF-IDF features: uni- and bi-grams, terms appearing in fewer than 2 documents or
# in more than 50% of documents dropped, sub-linear term-frequency scaling (1 + log tf).
tfidf = TfidfVectorizer(min_df=2, max_df=0.5, ngram_range=(1, 2), sublinear_tf=True)

# Fit the vocabulary on the training split only, then reuse it to transform the
# validation and test splits (no data leakage).
train_tfidf_sparse = tfidf.fit_transform(train_df['preprocessed_text'])
val_tfidf_sparse = tfidf.transform(val_df['preprocessed_text'])
test_tfidf_sparse = tfidf.transform(test_df['preprocessed_text'])

import scipy.sparse

# Densify each split into a DataFrame with one column per n-gram of the vocabulary.
vocabulary = tfidf.get_feature_names_out()
train_tfidf = pd.DataFrame(train_tfidf_sparse.toarray(), columns=vocabulary)
val_tfidf = pd.DataFrame(val_tfidf_sparse.toarray(), columns=vocabulary)
test_tfidf = pd.DataFrame(test_tfidf_sparse.toarray(), columns=vocabulary)

from scipy.sparse import csr_matrix, hstack

# Re-attach the labels so each frame is self-contained for PyCaret's `setup`.
for frame, labels in ((train_tfidf, y_train), (val_tfidf, y_val), (test_tfidf, y_test)):
    frame["target"] = labels.values
# import pycaret classification and init setup
from pycaret.classification import *
# PyCaret experiment on the TF-IDF features:
# - trains on `train_tfidf`, uses `val_tfidf` as the hold-out set,
# - 10-fold stratified cross-validation with min-max scaling of the columns,
# - every run is logged to the "approche_classique" MLflow experiment
#   (tracking URI configured at the top of the notebook).
s_tf = setup(
data=train_tfidf,
target='target',
test_data=val_tfidf,
fold_strategy='stratifiedkfold',
fold=10,
normalize=True, # scale the data
normalize_method="minmax",
session_id=123,
index=False,
use_gpu=True,
log_experiment=True,
experiment_name="approche_classique",
experiment_custom_tags={'framework': 'pycaret', 'vectorizer': 'TfIdf'}
)
[LightGBM] [Warning] There are no meaningful features which satisfy the provided configuration. Decreasing Dataset parameters min_data_in_bin or min_data_in_leaf and re-constructing Dataset might resolve this warning. [LightGBM] [Info] Number of positive: 1, number of negative: 1 [LightGBM] [Info] This is the GPU trainer!! [LightGBM] [Info] Total Bins 0 [LightGBM] [Info] Number of data points in the train set: 2, number of used features: 0 [LightGBM] [Info] Using GPU Device: gfx1035, Vendor: Advanced Micro Devices, Inc. [LightGBM] [Info] Compiling OpenCL Kernel with 16 bins... [LightGBM] [Info] GPU programs have been built [LightGBM] [Warning] GPU acceleration is disabled because no non-trivial dense features can be found [LightGBM] [Info] [binary:BoostFromScore]: pavg=0.500000 -> initscore=0.000000 [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training 
because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no 
more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet 
the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements 
[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] 
Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] There are no meaningful features which satisfy the provided configuration. Decreasing Dataset parameters min_data_in_bin or min_data_in_leaf and re-constructing Dataset might resolve this warning. [LightGBM] [Info] Number of positive: 1, number of negative: 1 [LightGBM] [Info] This is the GPU trainer!! [LightGBM] [Info] Total Bins 0 [LightGBM] [Info] Number of data points in the train set: 2, number of used features: 0 [LightGBM] [Info] Using GPU Device: gfx1035, Vendor: Advanced Micro Devices, Inc. [LightGBM] [Info] Compiling OpenCL Kernel with 16 bins... 
[LightGBM] [Info] GPU programs have been built [LightGBM] [Warning] GPU acceleration is disabled because no non-trivial dense features can be found [LightGBM] [Info] [binary:BoostFromScore]: pavg=0.500000 -> initscore=0.000000 [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements 
[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] 
Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training 
because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no 
more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet 
the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] There are no meaningful features which satisfy the provided configuration. Decreasing Dataset parameters min_data_in_bin or min_data_in_leaf and re-constructing Dataset might resolve this warning. [LightGBM] [Info] Number of positive: 1, number of negative: 1 [LightGBM] [Info] This is the GPU trainer!! [LightGBM] [Info] Total Bins 0 [LightGBM] [Info] Number of data points in the train set: 2, number of used features: 0 [LightGBM] [Info] Using GPU Device: gfx1035, Vendor: Advanced Micro Devices, Inc. [LightGBM] [Info] Compiling OpenCL Kernel with 16 bins... 
[LightGBM] [Info] GPU programs have been built [LightGBM] [Warning] GPU acceleration is disabled because no non-trivial dense features can be found [LightGBM] [Info] [binary:BoostFromScore]: pavg=0.500000 -> initscore=0.000000 [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements 
[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] 
Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training 
because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no 
more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet 
the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] There are no meaningful features which satisfy the provided configuration. Decreasing Dataset parameters min_data_in_bin or min_data_in_leaf and re-constructing Dataset might resolve this warning. [LightGBM] [Info] Number of positive: 1, number of negative: 1 [LightGBM] [Info] This is the GPU trainer!! [LightGBM] [Info] Total Bins 0 [LightGBM] [Info] Number of data points in the train set: 2, number of used features: 0 [LightGBM] [Info] Using GPU Device: gfx1035, Vendor: Advanced Micro Devices, Inc. [LightGBM] [Info] Compiling OpenCL Kernel with 16 bins... 
[LightGBM] [Info] GPU programs have been built [LightGBM] [Warning] GPU acceleration is disabled because no non-trivial dense features can be found [LightGBM] [Info] [binary:BoostFromScore]: pavg=0.500000 -> initscore=0.000000 [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements 
[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] 
Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training 
because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no 
more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet 
the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] There are no meaningful features which satisfy the provided configuration. Decreasing Dataset parameters min_data_in_bin or min_data_in_leaf and re-constructing Dataset might resolve this warning. [LightGBM] [Info] Number of positive: 1, number of negative: 1 [LightGBM] [Info] This is the GPU trainer!! [LightGBM] [Info] Total Bins 0 [LightGBM] [Info] Number of data points in the train set: 2, number of used features: 0 [LightGBM] [Info] Using GPU Device: gfx1035, Vendor: Advanced Micro Devices, Inc. [LightGBM] [Info] Compiling OpenCL Kernel with 16 bins... 
[LightGBM] [Info] GPU programs have been built [LightGBM] [Warning] GPU acceleration is disabled because no non-trivial dense features can be found [LightGBM] [Info] [binary:BoostFromScore]: pavg=0.500000 -> initscore=0.000000 [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements 
[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] 
Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training 
because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no 
more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet 
the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] There are no meaningful features which satisfy the provided configuration. Decreasing Dataset parameters min_data_in_bin or min_data_in_leaf and re-constructing Dataset might resolve this warning. [LightGBM] [Info] Number of positive: 1, number of negative: 1 [LightGBM] [Info] This is the GPU trainer!! [LightGBM] [Info] Total Bins 0 [LightGBM] [Info] Number of data points in the train set: 2, number of used features: 0 [LightGBM] [Info] Using GPU Device: gfx1035, Vendor: Advanced Micro Devices, Inc. [LightGBM] [Info] Compiling OpenCL Kernel with 16 bins... 
[LightGBM] [Info] GPU programs have been built [LightGBM] [Warning] GPU acceleration is disabled because no non-trivial dense features can be found [LightGBM] [Info] [binary:BoostFromScore]: pavg=0.500000 -> initscore=0.000000 [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements 
[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] 
Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training 
because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no 
more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet 
the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements
| Description | Value | |
|---|---|---|
| 0 | Session id | 123 |
| 1 | Target | target |
| 2 | Target type | Binary |
| 3 | Original data shape | (4851, 4384) |
| 4 | Transformed data shape | (4851, 4384) |
| 5 | Transformed train set shape | (3395, 4384) |
| 6 | Transformed test set shape | (1456, 4384) |
| 7 | Numeric features | 4383 |
| 8 | Preprocess | True |
| 9 | Imputation type | simple |
| 10 | Numeric imputation | mean |
| 11 | Categorical imputation | mode |
| 12 | Normalize | True |
| 13 | Normalize method | minmax |
| 14 | Fold Generator | StratifiedKFold |
| 15 | Fold Number | 10 |
| 16 | CPU Jobs | -1 |
| 17 | Use GPU | True |
| 18 | Log Experiment | MlflowLogger |
| 19 | Experiment Name | approche_classique |
| 20 | USI | 3d81 |
[LightGBM] [Warning] There are no meaningful features which satisfy the provided configuration. Decreasing Dataset parameters min_data_in_bin or min_data_in_leaf and re-constructing Dataset might resolve this warning. [LightGBM] [Info] Number of positive: 1, number of negative: 1 [LightGBM] [Info] This is the GPU trainer!! [LightGBM] [Info] Total Bins 0 [LightGBM] [Info] Number of data points in the train set: 2, number of used features: 0 [LightGBM] [Info] Using GPU Device: gfx1035, Vendor: Advanced Micro Devices, Inc. [LightGBM] [Info] Compiling OpenCL Kernel with 16 bins... [LightGBM] [Info] GPU programs have been built [LightGBM] [Warning] GPU acceleration is disabled because no non-trivial dense features can be found [LightGBM] [Info] [binary:BoostFromScore]: pavg=0.500000 -> initscore=0.000000 [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training 
because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no 
more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet 
the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements 
[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] 
Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] There are no meaningful features which satisfy the provided configuration. Decreasing Dataset parameters min_data_in_bin or min_data_in_leaf and re-constructing Dataset might resolve this warning. [LightGBM] [Info] Number of positive: 1, number of negative: 1 [LightGBM] [Info] This is the GPU trainer!! [LightGBM] [Info] Total Bins 0 [LightGBM] [Info] Number of data points in the train set: 2, number of used features: 0 [LightGBM] [Info] Using GPU Device: gfx1035, Vendor: Advanced Micro Devices, Inc. [LightGBM] [Info] Compiling OpenCL Kernel with 16 bins... 
[LightGBM] [Info] GPU programs have been built [LightGBM] [Warning] GPU acceleration is disabled because no non-trivial dense features can be found [LightGBM] [Info] [binary:BoostFromScore]: pavg=0.500000 -> initscore=0.000000 [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements 
[LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] 
Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training 
because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no 
more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet 
the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements [LightGBM] [Warning] Stopped training because there are no more leaves that meet the split requirements
# Compare all PyCaret baseline classifiers on the current setup() session,
# rank them by Accuracy, and keep the top 8 (n_select=8) as a list.
# Each candidate's cross-validated metrics are also logged to the
# "approche_classique" MLflow experiment via the configured MlflowLogger.
best_models_tf = compare_models(sort='Accuracy', n_select=8)
| Model | Accuracy | AUC | Recall | Prec. | F1 | Kappa | MCC | TT (Sec) | |
|---|---|---|---|---|---|---|---|---|---|
| rf | Random Forest Classifier | 0.7096 | 0.7766 | 0.6733 | 0.7273 | 0.6985 | 0.4193 | 0.4211 | 1.2240 |
| et | Extra Trees Classifier | 0.7060 | 0.7829 | 0.6199 | 0.7522 | 0.6785 | 0.4123 | 0.4199 | 2.0240 |
| lr | Logistic Regression | 0.6957 | 0.7726 | 0.6903 | 0.6992 | 0.6941 | 0.3914 | 0.3922 | 0.7040 |
| lightgbm | Light Gradient Boosting Machine | 0.6863 | 0.7583 | 0.6721 | 0.6935 | 0.6817 | 0.3726 | 0.3738 | 0.8010 |
| gbc | Gradient Boosting Classifier | 0.6757 | 0.7584 | 0.7244 | 0.6614 | 0.6910 | 0.3512 | 0.3535 | 10.1400 |
| ada | Ada Boost Classifier | 0.6698 | 0.7318 | 0.4906 | 0.7684 | 0.5969 | 0.3402 | 0.3658 | 3.5500 |
| ridge | Ridge Classifier | 0.6595 | 0.7178 | 0.6509 | 0.6632 | 0.6565 | 0.3190 | 0.3195 | 0.7830 |
| svm | SVM - Linear Kernel | 0.6586 | 0.7205 | 0.6686 | 0.6569 | 0.6622 | 0.3172 | 0.3177 | 0.9510 |
| dt | Decision Tree Classifier | 0.6418 | 0.6412 | 0.6152 | 0.6524 | 0.6321 | 0.2837 | 0.2852 | 1.8070 |
| nb | Naive Bayes | 0.6206 | 0.6243 | 0.5776 | 0.6345 | 0.6040 | 0.2414 | 0.2429 | 0.6350 |
| knn | K Neighbors Classifier | 0.5747 | 0.6193 | 0.5964 | 0.5743 | 0.5822 | 0.1492 | 0.1510 | 0.7560 |
| lda | Linear Discriminant Analysis | 0.5741 | 0.5903 | 0.5394 | 0.5810 | 0.5591 | 0.1483 | 0.1488 | 8.2790 |
| qda | Quadratic Discriminant Analysis | 0.5178 | 0.5240 | 0.5048 | 0.5174 | 0.4905 | 0.0362 | 0.0381 | 6.3660 |
| dummy | Dummy Classifier | 0.5013 | 0.5000 | 1.0000 | 0.5013 | 0.6678 | 0.0000 | 0.0000 | 0.4540 |
2024/12/06 14:22:32 WARNING mlflow.models.model: Model logged without a signature and input example. Please set `input_example` parameter when logging the model to auto infer the model signature. 2024/12/06 14:22:32 INFO mlflow.tracking._tracking_service.client: 🏃 View run Random Forest Classifier at: http://localhost:5000/#/experiments/374735653194037029/runs/1b7562cf9df94ceb9575fe94f25b4051. 2024/12/06 14:22:32 INFO mlflow.tracking._tracking_service.client: 🧪 View experiment at: http://localhost:5000/#/experiments/374735653194037029. 2024/12/06 14:22:36 WARNING mlflow.models.model: Model logged without a signature and input example. Please set `input_example` parameter when logging the model to auto infer the model signature. 2024/12/06 14:22:37 INFO mlflow.tracking._tracking_service.client: 🏃 View run Extra Trees Classifier at: http://localhost:5000/#/experiments/374735653194037029/runs/27e638777c2d446281d8fad7d6abd349. 2024/12/06 14:22:37 INFO mlflow.tracking._tracking_service.client: 🧪 View experiment at: http://localhost:5000/#/experiments/374735653194037029. 2024/12/06 14:22:39 WARNING mlflow.models.model: Model logged without a signature and input example. Please set `input_example` parameter when logging the model to auto infer the model signature. 2024/12/06 14:22:39 INFO mlflow.tracking._tracking_service.client: 🏃 View run Logistic Regression at: http://localhost:5000/#/experiments/374735653194037029/runs/2c1e8e190f184898b39d0a451c18d964. 2024/12/06 14:22:39 INFO mlflow.tracking._tracking_service.client: 🧪 View experiment at: http://localhost:5000/#/experiments/374735653194037029. 2024/12/06 14:22:42 WARNING mlflow.models.model: Model logged without a signature and input example. Please set `input_example` parameter when logging the model to auto infer the model signature. 
2024/12/06 14:22:43 INFO mlflow.tracking._tracking_service.client: 🏃 View run Light Gradient Boosting Machine at: http://localhost:5000/#/experiments/374735653194037029/runs/6a9e202af6fe40cb899a0c90907ff80d. 2024/12/06 14:22:43 INFO mlflow.tracking._tracking_service.client: 🧪 View experiment at: http://localhost:5000/#/experiments/374735653194037029. 2024/12/06 14:22:57 WARNING mlflow.models.model: Model logged without a signature and input example. Please set `input_example` parameter when logging the model to auto infer the model signature. 2024/12/06 14:22:57 INFO mlflow.tracking._tracking_service.client: 🏃 View run Gradient Boosting Classifier at: http://localhost:5000/#/experiments/374735653194037029/runs/1e0f0447948542b096de1c10d2d5ebca. 2024/12/06 14:22:57 INFO mlflow.tracking._tracking_service.client: 🧪 View experiment at: http://localhost:5000/#/experiments/374735653194037029. 2024/12/06 14:23:04 WARNING mlflow.models.model: Model logged without a signature and input example. Please set `input_example` parameter when logging the model to auto infer the model signature. 2024/12/06 14:23:04 INFO mlflow.tracking._tracking_service.client: 🏃 View run Ada Boost Classifier at: http://localhost:5000/#/experiments/374735653194037029/runs/76f09c11900f46ad9e3256a2eeb15f1c. 2024/12/06 14:23:04 INFO mlflow.tracking._tracking_service.client: 🧪 View experiment at: http://localhost:5000/#/experiments/374735653194037029. 2024/12/06 14:23:07 WARNING mlflow.models.model: Model logged without a signature and input example. Please set `input_example` parameter when logging the model to auto infer the model signature. 2024/12/06 14:23:07 INFO mlflow.tracking._tracking_service.client: 🏃 View run Ridge Classifier at: http://localhost:5000/#/experiments/374735653194037029/runs/4565efa4ff5d43cab957d4c2b4dddce8. 2024/12/06 14:23:07 INFO mlflow.tracking._tracking_service.client: 🧪 View experiment at: http://localhost:5000/#/experiments/374735653194037029. 
2024/12/06 14:23:10 WARNING mlflow.models.model: Model logged without a signature and input example. Please set `input_example` parameter when logging the model to auto infer the model signature. 2024/12/06 14:23:10 INFO mlflow.tracking._tracking_service.client: 🏃 View run SVM - Linear Kernel at: http://localhost:5000/#/experiments/374735653194037029/runs/78f944640ffb4849b7cb0bf2878adbac. 2024/12/06 14:23:10 INFO mlflow.tracking._tracking_service.client: 🧪 View experiment at: http://localhost:5000/#/experiments/374735653194037029. 2024/12/06 14:23:11 WARNING mlflow.models.model: Model logged without a signature and input example. Please set `input_example` parameter when logging the model to auto infer the model signature. 2024/12/06 14:23:11 INFO mlflow.tracking._tracking_service.client: 🏃 View run Decision Tree Classifier at: http://localhost:5000/#/experiments/374735653194037029/runs/d4772e64515e4d189786544e859c4d7a. 2024/12/06 14:23:11 INFO mlflow.tracking._tracking_service.client: 🧪 View experiment at: http://localhost:5000/#/experiments/374735653194037029. 2024/12/06 14:23:12 WARNING mlflow.models.model: Model logged without a signature and input example. Please set `input_example` parameter when logging the model to auto infer the model signature. 2024/12/06 14:23:12 INFO mlflow.tracking._tracking_service.client: 🏃 View run Naive Bayes at: http://localhost:5000/#/experiments/374735653194037029/runs/6a4d6164761940aca4b35d0197c05fe9. 2024/12/06 14:23:12 INFO mlflow.tracking._tracking_service.client: 🧪 View experiment at: http://localhost:5000/#/experiments/374735653194037029. 2024/12/06 14:23:13 WARNING mlflow.models.model: Model logged without a signature and input example. Please set `input_example` parameter when logging the model to auto infer the model signature. 
2024/12/06 14:23:13 INFO mlflow.tracking._tracking_service.client: 🏃 View run K Neighbors Classifier at: http://localhost:5000/#/experiments/374735653194037029/runs/8756904d9ce0477d93c40f7d29df5bcb. 2024/12/06 14:23:13 INFO mlflow.tracking._tracking_service.client: 🧪 View experiment at: http://localhost:5000/#/experiments/374735653194037029. 2024/12/06 14:23:14 WARNING mlflow.models.model: Model logged without a signature and input example. Please set `input_example` parameter when logging the model to auto infer the model signature. 2024/12/06 14:23:14 INFO mlflow.tracking._tracking_service.client: 🏃 View run Linear Discriminant Analysis at: http://localhost:5000/#/experiments/374735653194037029/runs/534db0d5630045c89262831c4176e4bc. 2024/12/06 14:23:14 INFO mlflow.tracking._tracking_service.client: 🧪 View experiment at: http://localhost:5000/#/experiments/374735653194037029. 2024/12/06 14:23:14 WARNING mlflow.models.model: Model logged without a signature and input example. Please set `input_example` parameter when logging the model to auto infer the model signature. 2024/12/06 14:23:15 INFO mlflow.tracking._tracking_service.client: 🏃 View run Quadratic Discriminant Analysis at: http://localhost:5000/#/experiments/374735653194037029/runs/19daa709436648409be2c818a77b4b5e. 2024/12/06 14:23:15 INFO mlflow.tracking._tracking_service.client: 🧪 View experiment at: http://localhost:5000/#/experiments/374735653194037029. 2024/12/06 14:23:15 WARNING mlflow.models.model: Model logged without a signature and input example. Please set `input_example` parameter when logging the model to auto infer the model signature. 2024/12/06 14:23:15 INFO mlflow.tracking._tracking_service.client: 🏃 View run Dummy Classifier at: http://localhost:5000/#/experiments/374735653194037029/runs/ee6d33e807ca494b89f1fcc11348125f. 2024/12/06 14:23:15 INFO mlflow.tracking._tracking_service.client: 🧪 View experiment at: http://localhost:5000/#/experiments/374735653194037029.
Dans ce second embedding, on va exploiter un algorithme « économique » : la régression logistique, même s'il n'est pas le plus performant de cette comparaison.
# Hyperparameter tuning of the best "economical" model (logistic regression).
# choose_better=True keeps the original estimator if tuning does not improve it;
# the optimization target is Recall.
tuned_model_tf = tune_model(
    estimator=best_models_tf[2],
    choose_better=True,
    optimize="Recall",
    verbose=False,
)
2024/12/06 14:28:16 WARNING mlflow.models.model: Model logged without a signature and input example. Please set `input_example` parameter when logging the model to auto infer the model signature. 2024/12/06 14:28:16 INFO mlflow.tracking._tracking_service.client: 🏃 View run Logistic Regression at: http://localhost:5000/#/experiments/374735653194037029/runs/340dd7682c3146d78e0189629bb6e6e9. 2024/12/06 14:28:16 INFO mlflow.tracking._tracking_service.client: 🧪 View experiment at: http://localhost:5000/#/experiments/374735653194037029.
# Display the tuned estimator and its hyperparameters (notebook echo).
tuned_model_tf
LogisticRegression(C=0.472, class_weight='balanced', dual=False,
fit_intercept=True, intercept_scaling=1, l1_ratio=None,
max_iter=1000, multi_class='auto', n_jobs=None, penalty='l2',
random_state=123, solver='lbfgs', tol=0.0001, verbose=0,
warm_start=False)In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook. On GitHub, the HTML representation is unable to render, please try loading this page with nbviewer.org.
LogisticRegression(C=0.472, class_weight='balanced', dual=False,
fit_intercept=True, intercept_scaling=1, l1_ratio=None,
max_iter=1000, multi_class='auto', n_jobs=None, penalty='l2',
random_state=123, solver='lbfgs', tol=0.0001, verbose=0,
warm_start=False)# plot confusion matrix
# Diagnostic plots for the tuned model: confusion matrix, then per-class report.
for diagnostic in ('confusion_matrix', 'class_report'):
    plot_model(tuned_model_tf, plot=diagnostic)
# Score the hold-out split (PyCaret's internal test fold) and preview predictions.
pred_holdouts_tf = predict_model(tuned_model_tf)
pred_holdouts_tf.head()
| Model | Accuracy | AUC | Recall | Prec. | F1 | Kappa | MCC | |
|---|---|---|---|---|---|---|---|---|
| 0 | Logistic Regression | 0.7047 | 0.7919 | 0.6630 | 0.7246 | 0.6924 | 0.4095 | 0.4110 |
| 0430 | 09 | 10 | 10 hour | 10 min | 100 | 100 follower | 106 | 106 flight | 10pm | ... | youth | youtube | yr | yuck | yum | yup | yup go | target | prediction_label | prediction_score | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 3395 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | ... | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0 | 1 | 0.5742 |
| 3396 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | ... | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0 | 0 | 0.6982 |
| 3397 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | ... | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 1 | 0 | 0.7731 |
| 3398 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | ... | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 1 | 1 | 0.6970 |
| 3399 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | ... | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 0.0 | 1 | 0 | 0.7026 |
5 rows × 4386 columns
# Finalize the model (retrain the full pipeline on the entire dataset).
# NOTE: finalize_model returns a NEW fitted pipeline and does not modify
# `tuned_model_tf` in place — the result must be captured, otherwise the
# model trained on all the data is silently discarded.
final_model_tf = finalize_model(tuned_model_tf)
final_model_tf
2024/12/06 14:30:46 WARNING mlflow.models.model: Model logged without a signature and input example. Please set `input_example` parameter when logging the model to auto infer the model signature. 2024/12/06 14:30:46 INFO mlflow.tracking._tracking_service.client: 🏃 View run Logistic Regression at: http://localhost:5000/#/experiments/374735653194037029/runs/ceb584f72b4f4327995989f0959e8144. 2024/12/06 14:30:46 INFO mlflow.tracking._tracking_service.client: 🧪 View experiment at: http://localhost:5000/#/experiments/374735653194037029.
Pipeline(memory=Memory(location=None),
steps=[('numerical_imputer',
TransformerWrapper(exclude=None,
include=['0430', '09', '10', '10 hour',
'10 min', '100', '100 follower',
'106', '106 flight', '10pm', '11',
'1130', '11pm', '12', '12 crew',
'12 hour', '12 week', '125',
'12hour', '12hour flight', '13',
'13 hour', '130', '14', '15',
'15 min', '15 minute', '17',
'17th', '18', ...],
transformer=...
TransformerWrapper(exclude=None, include=None,
transformer=CleanColumnNames(match='[\\]\\[\\,\\{\\}\\"\\:]+'))),
('actual_estimator',
LogisticRegression(C=0.472, class_weight='balanced',
dual=False, fit_intercept=True,
intercept_scaling=1, l1_ratio=None,
max_iter=1000, multi_class='auto',
n_jobs=None, penalty='l2', random_state=123,
solver='lbfgs', tol=0.0001, verbose=0,
warm_start=False))],
verbose=False)In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook. On GitHub, the HTML representation is unable to render, please try loading this page with nbviewer.org.
Pipeline(memory=Memory(location=None),
steps=[('numerical_imputer',
TransformerWrapper(exclude=None,
include=['0430', '09', '10', '10 hour',
'10 min', '100', '100 follower',
'106', '106 flight', '10pm', '11',
'1130', '11pm', '12', '12 crew',
'12 hour', '12 week', '125',
'12hour', '12hour flight', '13',
'13 hour', '130', '14', '15',
'15 min', '15 minute', '17',
'17th', '18', ...],
transformer=...
TransformerWrapper(exclude=None, include=None,
transformer=CleanColumnNames(match='[\\]\\[\\,\\{\\}\\"\\:]+'))),
('actual_estimator',
LogisticRegression(C=0.472, class_weight='balanced',
dual=False, fit_intercept=True,
intercept_scaling=1, l1_ratio=None,
max_iter=1000, multi_class='auto',
n_jobs=None, penalty='l2', random_state=123,
solver='lbfgs', tol=0.0001, verbose=0,
warm_start=False))],
verbose=False)TransformerWrapper(exclude=None,
include=['0430', '09', '10', '10 hour', '10 min', '100',
'100 follower', '106', '106 flight', '10pm', '11',
'1130', '11pm', '12', '12 crew', '12 hour',
'12 week', '125', '12hour', '12hour flight', '13',
'13 hour', '130', '14', '15', '15 min', '15 minute',
'17', '17th', '18', ...],
transformer=SimpleImputer(add_indicator=False, copy=True,
fill_value=None,
keep_empty_features=False,
missing_values=nan,
strategy='mean'))SimpleImputer()
SimpleImputer()
TransformerWrapper(exclude=None, include=[],
transformer=SimpleImputer(add_indicator=False, copy=True,
fill_value=None,
keep_empty_features=False,
missing_values=nan,
strategy='most_frequent'))SimpleImputer(strategy='most_frequent')
SimpleImputer(strategy='most_frequent')
TransformerWrapper(exclude=None, include=None,
transformer=MinMaxScaler(clip=False, copy=True,
feature_range=(0, 1)))MinMaxScaler()
MinMaxScaler()
TransformerWrapper(exclude=None, include=None,
transformer=CleanColumnNames(match='[\\]\\[\\,\\{\\}\\"\\:]+'))CleanColumnNames()
CleanColumnNames()
LogisticRegression(C=0.472, class_weight='balanced', max_iter=1000,
random_state=123)import os
from datetime import datetime
from pycaret.classification import save_model

# Current working directory (kept with trailing separator for later cells).
PATH = os.getcwd() + os.sep

# Create the 'models' subdirectory if needed.
# exist_ok=True replaces the race-prone "check then create" pattern.
models_dir = os.path.join(PATH, "models")
os.makedirs(models_dir, exist_ok=True)

# Timestamp suffix so successive saves do not overwrite each other.
current_time = datetime.now().strftime("%m-%d-%Y_%H-%M")

# Persist the full transformation pipeline + tuned model to 'models/'.
# save_model appends the '.pkl' extension itself.
save_model(tuned_model_tf, os.path.join(models_dir, "best_model_TfIdf_" + current_time))

# To reload the saved model later:
# loaded_bestmodel = load_model(os.path.join(models_dir, 'best_model_TfIdf_' + current_time))
Transformation Pipeline and Model Successfully Saved
(Pipeline(memory=Memory(location=None),
steps=[('numerical_imputer',
TransformerWrapper(exclude=None,
include=['0430', '09', '10', '10 hour',
'10 min', '100', '100 follower',
'106', '106 flight', '10pm', '11',
'1130', '11pm', '12', '12 crew',
'12 hour', '12 week', '125',
'12hour', '12hour flight', '13',
'13 hour', '130', '14', '15',
'15 min', '15 minute', '17',
'17th', '18', ...],
transformer=...
TransformerWrapper(exclude=None, include=None,
transformer=CleanColumnNames(match='[\\]\\[\\,\\{\\}\\"\\:]+'))),
('trained_model',
LogisticRegression(C=0.472, class_weight='balanced',
dual=False, fit_intercept=True,
intercept_scaling=1, l1_ratio=None,
max_iter=1000, multi_class='auto',
n_jobs=None, penalty='l2', random_state=123,
solver='lbfgs', tol=0.0001, verbose=0,
warm_start=False))],
verbose=False),
'C:\\Users\\cecil\\OneDrive\\Documents\\AI_Engineer\\7-Realisez_une_analyse_de_sentiments\\P7_WorkingDirectory\\models\\best_model_TfIdf_12-06-2024_14-36.pkl')
# Generate predictions on the external TF-IDF test set
predictions_tf = predict_model(estimator=tuned_model_tf, data=test_tfidf)
| Model | Accuracy | AUC | Recall | Prec. | F1 | Kappa | MCC | |
|---|---|---|---|---|---|---|---|---|
| 0 | Logistic Regression | 0.7124 | 0.7903 | 0.7062 | 0.7159 | 0.7110 | 0.4249 | 0.4249 |
# Keep a short alias for the test-set predictions, then inspect the
# misclassified tweets (false negatives / false positives).
df = predictions_tf
visualize_nca_predictions(df, train_df)
Faux Négatifs (Prédit comme 0, mais vrai label 1) : - @emmacandlish re-reading new moon. decided im taking eclips on the plane, so i wanna read new moon before it . LOL - Air France 447 inbound from Brazil to Paris disappears... shit, I have controlled that flight LOTS of times... - Sitting in the airport, just got off the phone with Verizon to have them fix my phone! Will be home soon!!! - Did anyone have issues with their 360 ghostbusters flight suit code not working? #xbox #ghostbusters - feels bad for robert being attcked by paps in the airport. also, feels bad for watching these videos. - Was listening to @Eminem and @taylorswift13 on the plane - I'm sad @kacisloss is leaving today. I should have told her it wasn't possible to change her flight. - @BerlyAnne *grins* I am glad to see a smile from you this morning! Packed-n-ready to fly yet?? No rush of course, 6 a.m. flight.. - Thanks to the crew of EclecticRadio and Havana for the great night! Only bad thing is the tiredness at this moment - @aliyaki Weirdly I had that on the plane on Sunday night going to Melbourne, never had it on a plane before. It worried me *hugs* - @ElectriKateD aww, don't worry, I think that his flight should be there very soon - Small plane + Thunderstorms = No fun! But I'm in KC! Just called the cab - @burbankhays open the gate to your apartment complex? I'm just kidding. swing swing swing etc etc. love it! Love you! okay, that was gay - @honeybeetoys OMG 1000 followers! Have just added/edited your links on AB-sorry 4 delay - Going to DFW airport - @philhart @JoHart what I need is a really cheap flight next w'end-will have to have a look - @dannygokey awww danny im sooo sorry!! i feel so bad that really sucks..please do me a favor dont go on that airline again..welcome home! - mother nature, please do not rain and delay @therunners flight. we are cutting it too close to game time and i dont want to miss the game - I will not cry at my last HILLS crew meal! 
(Yes I will ) - im officially at the airport ready to leave new mexico. im coming hoomee - Wishing I could be at jetset with the space crew. - @spencershell When does your flight leave Thursday? If it's around noon, I'll probably see you at the airport. - i decided to not jailbreak it quite yet as i want to try out 3.0 so i'm trying to just do a standard itunes upgrade.. error 1604.. dang - Dad-Ur dawter is grounded til she's 71! Wife argues but has 2 concede. Dawter (holding flowers) Happy Mother's Day, Mom! #crapsoaplines - Just saw the trailer for Green Lantern: First Flight, awesome! http://movies.yahoo.com/movie/1810077708/video/12839105 - Dropping kakevin to ngurahrai airport. He's goin to SG, and I'm still here. Aaah I envy him for this - I dream on becoming a flight attendant. - is gunna play the wii.ugh tobad that pillowbiter is grounded eh guess thats what u get for going in the drunktank xD haha aww :'( - @colettebett I have 4 more shows this summer. 2 more next week and 2 in July. Sittin on the plane back to Texas now. - @jordanknight Good Morning. I missed your Tink I was downloading the Uber Twitter upgrade. It's great! - @jellyybeannn :o Idk y, but I luv the airport lol. Lol, i'm old - @kaitibug For a wedding! In fact, currently on Virgin America. Crazy airline! Weeee wifi in the air! - @ChrisTFT did you leave room in the luggage for me?! - not excited about getting up in 4 hours. Or the prospect of being at the airport at 4:30 am on Friday. - Why do I have to work during the entire LSU game? Thought I'd get to watch the beginning at least but a rain delay has prevented that - @harbars I was going to say have a good flight but perhaps - "Have as good a journey as having to cope with other people will allow". - @agriswold Lady! We still need to have a housewarming party with our crew! - Sorry for the delay... I'm back home This #sotd fits my mood today for some reason. 
â« http://blip.fm/~7r2zh - extremely organised airport though, everything is easy to find, everyone is calm, its very nice... couldn't find any special cakes though - CNN: Brazil confirms plane crash (duh) prays go out to the family...horrible tragedy - In exactly 24 hours I will be seeing the girl at Kansas City International Airport... I'm very excited!! - @Darine Have a safe flight dear and ENJOY - Air France plane from Rio to Paris missing. Crashed ? Maybe just me, but I'm nervous... Hope they're ok http://bit.ly/15Y5iK - Early morning airport blues - http://twitpic.com/6qnxy - On the plane to Buffalo to visit GG! - @FrankAdman i saw you several times, but you too popular. i'm actually on my way to the airport. #140conf - trying to find the cheap (coach) airline tickets @ Tyrese4ReaL was talking bout, not cheap to the cheap $500+ - About to board plane to SG.. thank goodness for lcct wifi! Gonna miss malaysia - About to board my flight to kona! - Hey @hawkcam ive been away for a couple hours - sunny did you confirm flight? if so woohoo (hawkcam live > http://ustre.am/2f9i) - grrr... Jetstar's website and phone service doesn't reflect today's flight changes! It's called the electronic age, people! - I am very saddened by the tragedy of Flight 447! You have no idea! - Both sons have stepped up to go to the airport. They are lovely and I take full credit for training them! - I'm so sad about the Air France Jet crash. It's crazy, the plane just disappeared. Poor travelers... http://is.gd/LKkT - @billyraycyrus you amaze me I love how kind you are to everyone and you seem totally grounded - is on her way to JFK airport..next stop GREECE! Be back the 23rd! - Have to clean install 3.0 on my iPhone. Upgrade left too much cydia crap around - In case of a water landing your seat can be used as a flotation device. 
That term makes more sense now.I hope we're not late 4 the show - @eachnotesecure Yeah, I agree, but we can't afford to upgrade the car radio just for the 15 minutes we're in it every day. - At the airport waiting to board my flight gunna miss my Pila! - coming home tomarrow, not looking forward to my flight at 5 in the morning - iTunes now mess up my play count for podcasts since the 8.2 upgrade - @Nigelclarketv aww cool. Transformers was crazy. We performed til 1. Partied til 5. & just caught our plane home!! Haha good times - is never thought i would be grossed out by lindsay lohan topless- Got an A in OB so out celebrating with the crew - Off to the airport to fly a plane solo again first have to drop daughter off at her new school... - chilling in Cabo. Weather's beautiful, plane is idling - @ the airport saying farewell 2 some grobie friends - Loving Lexington. At airport getting ready to leave and already looking forward to the return. - Said goodbye to Ash at the airport today have a safe trip bud! - 'OMG Damo quick, it's Concorde!' (turns out no, not Concorde but Vulcan) smaller apparently, not much of a plane expert! - Good morning everyone! Okay I pinched my finger this morning with my seatbelt, now i have a little blood bubble on finger! Ouch! - Tuff luck o well gonna make my way to LGA now! Don't want to miss my flight!!!! - Getting on a plane headed for some days of shopping in the windy city with mi madre - @ktsummer where u goin??? I wanna go on a plane somewhere - @cmogle is there an upgrade? my software update doesn't show @futurescape Am on 10.4.11 but Safari 4.0 isnt behaving. - leaving to go shopping ! my last day in thailand ! leaving TODAY ! actually tomorrow about 1 am, but i have to be at the airport today ! - The first thing I smell walking off the plane in Nashville - Barbeque! 
- In long beach our flight doesn't take off until 4:50 but I'm almost home, Mexico was AMAZING But damn I'm bored I'm the airport - just said goodbye to Trev I suppose to be on plane next to him...life is just unfair sometimes - @chrisjsimon Naks! Japorms na japorms... Now get back to your seat ang fasten your seatbelt! Pasaway! - For anyone waiting for results of Telegraph #photog comp, so sorry for delay I'm having Internet issues. It's broken will post asap - Supposed to leave in 20 min. Plane isn't here. They haven't said it's delayed yet, but found the plane with 20 min until landing. - Ah man. I cant wait! Just about to board a flight to Florida - going to airport... will be headed to DFW then DIA... goodbye Texas Tech! - Finally free from my flight. Cool temps in LA! - Getting packed for my flight home this morning. I miss seeing all my church peeps today. - is awake bright and early and about to head into traffic toward the airport! OH BOY! Disney time in T -5 hours!!! - @jancornelis wish I could bring an assistant ! Too bad airline fees aren't very cheap - @TomFelton Safe flight home to you and Jade XX - has had a magnificent time in Galicia. Wine, grappa, liqueur now jammed into suitcase. Off to the airport! - @compsolutions "Bus to GongBei from Airport takes about 40-60 minutes FYI." - remember to look us up when you head back! - Waiting to board my flight back to NY. I had 2 hrs of sleep & 3 muskateers for bkfast lol womp womp - @ChuckSmith ah @iphone_dev just told me that it got easier to upgrade - @msdrpepper @hashiphone @sleestakk @mizzbbri Thanks for your help. I just checked my AT&T acct and I can't upgrade until 03/2010 - Deja is @ the Alanta airport..she leaves in 2 hrs for london I miss her already, Atleast shes still in The US for now! - up early, heading to Heathrow for our WWDC flight - On the ground in Houston on my way to Portland for an MCN board meeting... 
Woke up at 3:30am for the flight - booking my flight back home soon - Watching The Soup and getting ready to take wifey to the airport. - Tests Over In 5 Days. Then Hitting LAX Airport In 18 Days x - Nerves have really kicked in. Haven't got my passport and we go in a week. Just called passport office 4 upgrade. Didn't seem too helpful - my ipod touch died after upgrade, now cannot bring to PNG - Plane diverted back last night to Chicago due to med. emergency. Plane due out a hear at 4:45 pm. - i feel like pasta haha. sooooper dooooper bored.nothing to do grounded! ugggh. i have the rocklobster song stuck in my head.bahahaha - Whoa whatta landing- hello sunny san diego - @Zeeenia i was thinkng more on the lines of stayng at the airport and then followng them to their hotel, then findng out their room no. - The storm is here. Rain delay for the game. Glad I am not at the game tonight. Fans being asked to clear stands because of lightening. - Getting ready for the early flight... joy. - On the plane waiting to take off - Booking my flight back to Atlanta!! - do you have problems with !kde 4.2.3 and Radeons? after upgrade my X consumes over 50% in state of idle - Rain Delay for the Rockies.... - @jane__ I'm awesome! my parents grounded me from da computer during the week but not on weekends! - Landed safely in Florida. @grantdaws didn't fuss at all. Everyone on the plane was impressed with how good he was. Me = proud papa. - @BlissLauderdale Ughh Ive heard!! Unfortunately my flight leaves Thursday night!! - reserved a plane ticket! The move date is SET for July 27th! #fb - Sit'n at the airport, bored and have the damn munchies - just got on the plane. yay! first class fun. haha! okay i'm turnig off the phone already. bye! next stop vegas. - I would be leaving on a jet plane ... if my flight hadn't been pushed back two hours. - getting ready to go to missouri and michigan for two weeks. gonna miss him and im scared to death for the THREE plane rides it will take. 
- @EktorOni Buscarme al aeropuerto!!!! Gave you my flight info and you said that you could... - On board my flight...starting a new book, The Alchemist by Paulo Coelho...it was recommended to me by a friend. Tweet ya l8r! - Grounded I accidentally closed my sister's finger in the back door, so I'm grounded from going outside today. Oh Well. - @underoak did you upgrade your #iphone yet?! It's pretty sweet, a little anticlimactic since we still cant mms. Boo AT&T! - @Carrieisbarrie Hey yeah i'm ok thanks & yeah she did get away ok, i cried all the way home from the airport how r u sweetheart? x - @jahdog707 That tofu looks amazing. I need to catch a flight to that restaurant right now!! - @HeathE2003 i wish you could come! maybe you can hop on Zack's flight..we just won't have a fam vaca tee for you... haha - Empty plane! Luv it! I get the whole row to myself. But forgot my drink coupons at home again!!! - Going to the airport soon - @Geekvibes Yup I'll be there to and will go to airport from now on! - @MattJacobi Oh hells yes! #first class flight - Waiting for my luggage so I can shower, change, get my Starbucks and then head out around Dublin!!!!! - i opened the gate for mom then it rained so hard and im all wet... it was fun... i wanna do it again... hahaha... the rain stopped... - @LizAnjos I'm staying at a hotel right at the airport (CDG) the first night... Then I'm staying at http://bit.ly/746qg - In Toronto - plane to Newark likely to be delayed by one hour Good job I decided to stay overnight in Newark tonight and drive tomorrow. - last nite @questlove proved to me that there really is no such thing as too much stevie wonder. i woke up in my roots crew tank, smiling - @paulineANNtan you should've booked a flight! 
- @thesldude86 I never knew you spoke to the Aussie TODAY show Crew - Flight KM146 claiming bags already - just woke up, felt good to sleep in But i still gotta do my hw before my mom comes home or i'm dead/grounded =/ - @RecipeRunway So sorry for the delay. The cooking part is easy. The posting part, not so much. Now on to Strawberry & Prawn Salad... - : The US Open is in a rain delay... sad. - @FeliciaSlattery Happy, Happy Birthday to You!!...have lots of fun jumping out of the plane..hold on tight - You pay too much for food @ the airport AND it doesn't even taste good! - Nervous for the online check-in day I have to do it quickly. - In Val d'Or, waiting for my wife and youngest's flight to leave. a long drive to Chisasibi for me. - @lrkane I like the film 'Flight of the navigator'. Does that help? - @Blancoei yeah that wud b crapy great! no ticket, no baggage, just lot of cash, toothbrush n' passport great, no? - running off to the airport, I'm gonna miss my ladies so much - @xxandip I've been up since 3 damn Glasgow airport!! - The weather is SO nice out and I'm at the airport booooo Had a great time in NY!! Gonna miss you all!!! And I miss him already ::cries:: - there were 3 slovaks on the plane that got lost http://is.gd/Mcel - @jimmuncie Pretty good I mean, I'm exploring this whole country music thing - definitely a calf looking at a new gate. - @faultlines Ah, finally. Sorry for the delay. Wala akong season 3 pero check ko. Baka nasa Manila yung files ko. - @lrwher couldn't change my flight! stuck in chicago - @DaySpringCards So glad to hear that! I'll be emailing you shortly. It's been a hectic last two weeks..I apologize for the delay - @shybutflyy np what's going on with ya? New member of the late night crew I see. - @PBSmitty @EvilZen says that there's something wrong with Twitter. Delay on sending her your request, but she'll add ya! - @SaruhMai sorry im grounded - The Air France plane is scaring and depressing me today. - just got home from.. 
Mass, mommy, west gate and super market bought a collar for toby and basti - @JCTurner Lucky you with the 3GS! I can't justify the upgrade expensive holiday looming. @stephenfry review in The Gardian was good? - @SouthwestAir Your rapping flight attendant should definitely MC the video- I hope i'm on one of his flights soon http://twurl.nl/23h5bo - Air France flight AF447 crashes with 228 passengers. Feeling ...May lord give all strength to their families and friends. - watching flight plan in rcti...cool! - out the house heha :L duno why i dreamt that but yeah and i also dreamt that a flight tracker thing said that our flight had crashed :S - @idubbs I'm almost afraid to know. It makes the waiting for release/upgrade so much more painful - On my way to the airport to pick my brother up Using the mobile broadband to stay connected. Love this thing lol. #Jordy26 - Off to the airport for Maria's album launch - En route to Yankee stadium..meeting the rest of the crew there. Sorry I'm running late gang! - The director of the show said you can judge a runway by its breakfast - @jimbofin this will be one more reason for me to visit Madrid again. may be even quite soon. have 2 catch the plane now - My friend works 4 Delta @ JFK airport, flys 4 free & missed her flight. Now she's on standby 4 another 1, pray she gets on! - I will never miss brakpan lol now a glorious 2 hour wait for my flight. Meh. - just dropped my baby off @ the airport - Getting my hair cut by tam..then taking her to the airport - jesus poor people on Air France flight, I hate turbulence but they say planes can withstand that! I fly in 2 weeks - yay @Firemint Flight Control update came through on iTunes - @DavidArchie you have such an awesome voice! ur so cute at the airport in the Philippines! and amazing @ the concert! - reading about the plane in NJ ! how sad - @CoalEO Hi! A Kiwi? Flight of the Conchords fan by any chance? - Never fly with IBERIA airlines... 
they lost my luggage - PLEASE PRAY FOR THE FAMILIES AFFECTED BY FLIGHT 447. PLANE DEBRIS WAS FOUND IN THE MID-ATLANTIC. - this is the third time I'm going to be on a plane taking off today - has lost her driving licence = no ID It sucks being grounded from the pub for up to 10 working days! - going to the airport to pick Liam up - The Hangover: That movie is gonna be hilarious. wish i could see it with the crew - Phoneless and sad. the iPhone upgrade ate my SIM card! - Heh "FBI: Terrorist Attack on Golden Gate Bridge May Have Been Green-Screened" http://tinyurl.com/p5kyst - @graywolf you should probably do one more USB stick and swallow it before you get on the plane - so you get through security - Looks like I don't qualify for iPhone 3GS upgrade pricing until 5/10/2010 - Rogers fail already! Customer central not showing iPhone upgrade for me - i just booked my holiday then realised i will be missing my graduation.flight non-refundable.no graduation for me mum is MAD! - @esmeeworld have a good flight where u headed??! - On the magical express on our way to the airport - 90% packed 4 Vegas. Still can't believe I pulled off a priv plane 4 $100 person. LOL Wedding this wknd, Kaskade, pool & cocktails Oh yes - Just got charged $30 4 freaking internet at Sydney airport! Nothing to eat either... except... KRISPY KREMES! Just swallowed a half dozen - is in the studio editing last 2 shoots. Then it's pack my bags for a flight to Singapore in the morning. I *heart* moving around. #fb - so i was attempting to upgrade wordpress to 2.8 and it totally hosed my web server - URGENT -- Air force officials say they have found bodies and debris from Air France flight 447 which crashed in the Atlantic. - flight delayed for 10 hours! madness, only 1 seat left in next flight, business class see you later Wally! - @robinson1970 poor Grant is delayed as usual at the airport and NO JAY!!! - Boooooo! Still on the runway... An hour later I need to get home people! 
- After almost 4 hours on the runway our flight just got cancelled and I have no idea what I am gonna do. But at least, I'm still smilin - chillaxin. grounded - No Iron Maiden Flight 666 DVD in the post today Will have to wait til Tuesday now as got it posted to work!!!! Plop!!! - Waiting at Orlando airport...an hour til we board and the kid is already a nightmare. Great, flashbacks from our flight last year - Flight got delayed due to rain - another plane down......................damn! so sad - Listening to "Cross the line" by Fiction Plane... Greatest song ever, better than sex, better than life... About to orgasm, nuff siad - delicious fusion mouthgasm in Sausalito after biking over the golden Gate bridge - @PamAtherton You are right, audio interfaces & microphones are always interesting to airport security - I can't get past 79 on flight control. - Flight is toast--cancelled! I'm now re-booked for tomorrow and heading *back* home. What a great way to spend the last 6 hours of my day! - my husband should be landing in Denver soon. I wish I could've gone. - @HamishGraham haha it's all planned out, my friend jeremy is picking me up from the airport and my bro is leaving his door unlocked - @simon Indeed. If you commit to 3 years can you upgrade during that time? Thanks for the respose Considering options for @getOnePage - @pinkhazebfly @JazzyLamby hahaha I Love you guys we the 8/9 crew hahaha PinkHaze was On IT tonite/mernin! @MariahCarey goodmerrnting/Nite - W8ing @ Dubai airport, HK is better... - @lancearmstrong how long does your flight take? i hope you arrive in time for No. 4 - The news is always so depressing I feel for thoes people on that flight. - Leaving in FOUR DAYS for a 12 hour flight - Off to bed; good night everyone. *although I feel terribly sick....atleast I'm going to bed happy because the BREW CREW won tonight! - Plane crashed in france - Leaving to the airport in 30mins - i am sleeepyyy. today = busy. 
work 10-4, library, class 6-7:20, tanning, packing, bit of sleep, atlantic city airport, myrtle beach - Flight of the Conchords was awesome! Had sweet seats 3rd row courtesy of Luke's friends. No pics though Might... - http://bkite.com/07ywb - Adelaide airport - everyone now has been waiting 45 mins for bags.... From my flight only, not happy - Waiting to head to airport? Not enough time to actually go out and do something but enough time to render me utterly bored - on my way to the airport - So proud of SCC! " Love SAA and my fav friday crew from kerck! You guys make my life . - @el_friendo I have done a couple here and there but damn school has taken a lot of time away from it! I've CS3. I should upgrade - @KevinFrankish Good morning Kevin and BT crew FYI Dufferin Peel Catholic School Board still have one more day of school - @kinagrannis booo, that's not fun!... have a good flight - Misses the DB crew - In plane on my way to texas - Back home now and getting into tiding up my flat. My luggage was never found. Ah well. - Mmm..Guys..I have a questi..What do you think is a good design for an airport?..Wish you'll reply to my question..thanks in advance.. Faux Positifs (Prédit comme 1, mais vrai label 0) : - Just wrapped! shouts to @marcclark and the crew! these shots are sum of my new favorites... got @musedandabused somethin silver... - Stuck @ the gate friggin' tired as all get out.... - Just dropped Josh off at the airport - Almost done packing, I have to be at the airport at 12:30. Then DALLAS! I bought the new Jonas Brothers CD maybe I'll see them down there - @tommcfly Hey tom put me in plane with u and the guys i want back to brazil for see my family - @galaxydazzle I want to! if I book today I have a flight for 10 euros... but i need a creditcard for it! and somebody to go with - @eesti93 I was! We were delayed sitting on the runway for an hour because of the wind and rain. - Is going WI but will be back in AZ on Monday! Yay! 
Going to watch Friday the 13th on my iPod on the flight. - upset stomachhh GROUNDED! &jealous, of some hoooee. :/ iiii wwaanntt hhiimmmm,</3 - keep gazing at my one beautiful shoe its lonely wasnt found in taxi means its crying somewhere along friar gate me too sob!(ok im not) - @agent242 just got it...guess there was a bit of a delay with the direct message. thanks muchly. - @JeepersMedia can u buy the flight for me?" cuz i cant go to NYC israel is too far - .....LOVE free wi-fi at the FLL airport!!!! - @jayncoke ooh, On a Plane would be awesome! Can you please send that to Andy Samberg? - Somehow I managed to get my flight bumped up early enough to make it to game 1... and I was able to keep my first class seat! Woo hoo!! - At the airport problem with our tickets - On the plane now. I do quite like Easyjet. They get a bad ride cuz they are budget, but I find them to be pretty proffessional - Listening to " hey you" by pink floyd.. Ha! Takes me back to boarding school. Oh the pain! - Just landed in Tucson. Didn't really sleep on either flight. anyone know of a good place for lunch? - O.K TWITTERLAND IM OFF 4 THE NIGHT/ EVERY OTHER WEEK IM OFF 4 A FLIGHT/ I CAN SEE MY FUTURE/ IT'S RIGHT ABOVE BRIGHT/ RIGHT ABOVE A KITE - Just landed on philly. And my ears are stuffed :/. one more plane to go then ill be in hotlanta - @_CrC_ sounds awesome to me. when we flying? do you serve food on the flight? let me guess, tacos - Good LATE nite/EARLY mornin Twit Fam...on a plane back to So. Cali in 10hrs.. :/ LOL - ZOMG i was just rubbergloved at the airport on suspicion of swine flu - Yay, 2 new flight control maps coming in the next update A beachside runway and aircraft carrier - @jtimberlake Next time you come skiing/boarding here in Utah, you should give me a tweet! There are some cool spots only the locals know - Kayak.com hasn't been finding me the best deals lately Just found better deals on Virgin and JFK for a flight on their own site. Hrm. - My feet still hurts. 
But I seriously miss walking the runway. I wanna do it again! :| ) Still have curly hair. - Taking my baby to the airport. - In B-town. Flight delayed 4 4hrs Did dry run of my pitch to the Westcon Group tomorrow. They have a confidence monitor. Whew! #TheVibe - Hey Joey!! New Zealand, huh!? That's so awesome! Have a safe flight Can't wait to see the amazing photos y'all take!!! - Ok... finally got some form of internet set up! Sitting at the gate. Checked Google Maps. Hollywood's 30 mins away only? Tempted - Thx to everyone for reassuring me about my flight tmrw. As travel planner, I know it's safe. As gf/daughter/sibling, i'm a bit nervous - Can't upgrade iPhone to OS v3, something about "server not available" - is tryna find her a 30 yr old junt with a 401 k, stable job, good home with an extra room 4 me and my baggage..so tired of u young dudes - My sunburn delay me from working out.... I'm still tender... - so sad to go back back to philly philly in the am...until next time dnc crew... edi teddy... and such! - Not a good day. Took my sweety to the airport early this morning. Gone to Israel for sisters wedding. A whole week sooooo hard. - I feel so sorry to the friends and families of the ppl who were killed in the Air France flight. - At the Miami airport checking out magazines...about to fly to Venezuela to visit some fam...I'll miss my tech but I'll love the food - so, i just found out that i've got enough frequent flyer miles to get a FREE plane ticket to nyc! life is good - @MamaMisfit Yea but im still grounded for life and i ain't gonna make it to this summers performances - Trying to upgrade to 3.0 and getting no where. - dammit, forgot something at home... cant sleep and my flight's in a few hours - @tommcfly Hope to see you at the airport todaaaaaay! sorry about me and my friend welling yesterday when you arrived. love u - Flight is booked solid, no window seat??? OK, that's going to suck..guess I'll make up for some sleep - Packing up suitcase. 
Flight leaves today. Aww...sad to say goodbye to the beautiful beach - Getting ready for drive to airport - @mechangel Safe flight sweetieeeeee!!! So sorry I didn't get to see you before you left - I think my flight is delayed its not here yet - haz her gown ordered today. and dropped boogie off to the airport. and now.. laundry galore is to be continued. - Why did I book a flight for 6:50am tomorrow? - ZOMG I just figured out I can finally upgrade my phone to the iPhone :] I'm excited now Anyone else use the term ZOMG? Reply with ZOMG - @RiskybusinessMB i wish i was in dallas. My bffs are landing there soon for a layover to arizona - @sealdi airline booking? like @noreen's problem with Cebu Pacific... - Flight b4 us got canceled hoping ours at 3:30 won't Otherwise we have 2 drive 2 Dallas 2 get on R connection. http://twitpic.com/3j4p8 - @dannygokey Hope your luggage follows you this time! - at the airport. Have a nice trip fikri..All the best to you - well bye guys if i can i'll call yu at airport or on way to there hmmm longest three weeks of me lifee ily. - @dutchiegurl aww ur welcome. glad u had a nice night! hope ur having a safe flight. will keep in touch with u over fb/twitter - @joxlan Yeah you lucky thing! I have to pay quite a lot to end my current contract early for the upgrade it seems - I've boarded my plane, and they just said to turn phones off so... Bye - My flight is delayed I just want to be home! - Looking into freeware flight sims . Any suggestions? - @frankmartin you know, my tweetdeck "broke" after upgrade - lunch done and connection found in Madrid Oracle CVC so sorting out mail etc before heading to airport for trip back to blighty - @6stringhero Have a great flight and tell her I said hello from S FLA. And please rock and roll for me I love it!! - Hmm, @NicoletteTay should be on the plane now on her way to Melbourne! I'm going to miss her loads! 
- Now that their is a more local airport its nice to see air shows in my hometown - @Noadi @lila82 It jumps over the gate and it's the kids playroom, so they need to be able to get in. I'll have to try the squirt bottle.. - Off to the airport. :] Cyprus, HERE I COME! - Sitting Ready Reserve at ATL airport...hoping to fly out but kinda wanting to stay home today - Since tweeting about my lost luggage, now followed by co-founder of Trace Me Luggage Trackers. Now if only I'd known about this before - BTW, I had a diet pepsi, not beer!! Guess what? Our flight has been delayed!!! - FUN FACT! I basically have 2 airport uniforms. One for warm weather and one for winter. So if you see me at The airport, DON'T JUDGE ME! - @Kutski Quite pissed off i have to wait for the my upgrade to get the new handset though - Is watching tinker bell nd still grounded - It would be cool if there could be an upgrade. Like more bosses and stuff - < 30 minutes from curb to gate including checkin/immigration and security. Course the flight is late - we are at orlando airport, flight now scheduled for 8:10, bought freeze dried ice cream from nasa store - Air France confirms plane crash... 228 feared dead - Flippin heck, the upgrade price to Windows 7 Ultimate is stupidly high. Not exactly encouraging people to upgrade, Microsoft. - @DJ_AM have a safe flight!! - Now I am leaving at 10:22 PM on flight United Airline 44. 14 hours lay over in the LA airport. How Nice. - The flight was full, so now we have to wait till 1 o'clock before the next one!!! Boo!! - Touched down back at heathrow. Now for the layover. - Crap. There's a hipster judge on the plane and I'm not being cool - loves that it is light out at 5 am. last bkfst before summer with science crew. then off for more Nemo. I'm gonna sing more today. - I want to go to Disney World and meet Mickey! But no by plane, it scares me! - Wow...this is a small airport...I'm spoiled by JFK. @junkprints You feel me! 
It's hard to find good help these days! - @ShawneyJ Aww, sorry you're bored hon! Play Airport Tag? Which airport are you stuck in...some are better than others. #ballashoughband - Praying for all the family and friends of the people from the flight outta Brazil to Frace. It's a very sad day with this tragic loss - @DavidArchie Hi Archie! All of your fans in the US are waiting for you there! Have a safe flight back to US! - @omfericit Yeah Can't wait to get on the plane. I'm so tired...Black Sea on Sunday, yes! - @theflyingpinto thank you so much, i'm going to need some friendly faces in the crew room i'll let you know when i get my transfer - Missing Air France plane may have been found. http://www.cnn.com/2009/WORLD/americas/06/02/brazil.france.plane.missing/index.html - Jakarta Globe apps not working at all after 3.0 upgrade and app update - @Jajawilk ya? well jump on a plane and go! i just cant w8 for the weather. ill be lonely til august! boo - On our way to the airport... then to New York. - at the airport on way home. - @jeffpulver have a nice flight - @thefloatingfrog very good! I've still got loadsa contract left before upgrade - New luggage... new clothes... new haircut... I'm on a roll! But now I'm tried also - looking at all my old myspace status' oh mann. Skyrockets in flight! afternoon delight! AAAAAAAAfternoon delight! - @chrisfinlay Etchells are still landlocked but dinghies are sailing. Looking for a crew for Laser II but too cold to capsize right now - @TheMandyMoore Hee! I'm about to board a plane to Vegas myself! Hope you have a good time! - @masukair_king quest crew rocks my sock! and jabba... and kaba! - Bedtime. Early flight tomorrow. - you so much flight was ok thanks. thank you so much for last night. hope all's ok and i'll ring you soon. i love you and miss you xxxx - Everyone pray for the people on flight 447 and their families. What an awful situation!! 
- Five minutes and counting Then I get to pick my mom up from the airport - @jessnextdoor yea... sad noh... when I heard about the missing air france I prayed na sana the plane just landed somewhere safe... - @abc7 at the Los Angeles Chamber Orchestra concert at Royce Hall. Little delay, but no damage. - @TexasGirlSari back and better then ever was able 2 save all my old stuff and upgrade x10 wat out u partna - @BeckyW_ "Americas Best Dance Crew" - Nice to know I'll have to check them out on YouTube. - G'morn Monday...The camping princess crew survived!!! Thanks for the memories and NO I won't camp again!! - Finishing up OMA mtg before heading home. #Aer lingus gave me inside seat not aisle. So have to dash to airport for reseating lottery - There's nothin like sweating your ass off on a plane during the summer while sittin at the gate.. The devil is beside me with ice cream - @MrPointyHead jealousy is the first sign of one's success! hope you get a flight soon to this beautiful Europe - @chrisbrogan The back of the plane is where all the whafts of stinkiness collect. - @bretharrison You know I meant Grounded, not Grounder. - is frustrated with fake replies to the missed connection I had - Flight Simulators are not my friend. I'd be awesome if it wasn't for all the crashing. - is in the airport in Philly annoyed...my flight is delayed - @christiancuervo hahah ily, how was the flight? Send me a pic of how they make PS3's - is taking Alix Eve to LGA for her flight to CMH for her overnight visit to Kenyon College. It's supposed to rain. - Grounded.. SUCKS - congrats to @samsameni for landing Casey Thompson!!....she's almost as good as our chef - We're ALL so blessed 2 B here. 05 plane to is VERY NEW, shows that anything can happen. God is still God, it was time. - stressin...flight in 8 hours - #robotpickuplines----> You upgrade me. & You make my synchronous gyroscopic modulaters go pitter pat! <--------Yeah? Yeah! 
- picking up someone @ the airport - At the airport happily awaiting my flight back to Houston and my sweet @stephenhadley. So great to come home to the love of my life - first flight delayed. At airport bar with Boss and coworker having a drink. Starting to worry that might miss connecting flight. Ugh - @DonnieWahlberg if i had a plane now id get there for sure - Headed to church in Beijing. :-] Then l o n g flight - Heading to Bill's for breakfast with the Realmac crew - Hi @YDURNAS very sad with news of flight 447 that crashed on the Atlantic from Rio to Paris - Current 3G owners CAN upgrade to a 3G S for an early upgrade price, just add $100 to everyone elses price - @SLessard Hanging out at the gate...come say hi? - @LogiBeer Oh yea, thats a great idea and then just concentrate Sunday with the editing and only the editing crew, excellent! - flight is as much my fault as anyones, i should have paid attention to the date not just time time. i was so excited i failed to validate - @JonathanRKnight You still on the flight? Talk to me - @EstJesusNoWhere there were about 27germans on the plane.. but no matter what nationality. they are human. but i guess theres no hope - I am waiting and literally running a countdown for Iphone 3.0 OS..to arrive for upgrade .. - Landed and waiting at baggage. - Heading to the airport - Back down on the Quay watching Windows timers again!! BORING!!! Off to see Iron Maidens Flight 666 movie in Portsmouth tonight though - Off to see Dutch Family Robinson today. Horrible weather for flying on a little city hopper William's (age 1) first flight too :-/ gulp! - No seatback TV this flight good thing it is only about 50 minutes. @ Concourse A http://loopt.us/23Tucg.t - checking in online for my flight tomorrow easter is almost here!! - TWILIGHT CREW AT MTV AWARDS. WOW. LOVE KRIS AND ROB. <3 - @loisyoung92 im getting an upgrade on my phone - AirAsia flt BWN KL delay for 45min -- mmm hv to wait until 9:30pm - Enough philosophical talk... 
in a Travelodge in Manchester, waiting for the taxi to come take us to the airport woot - sitting in narita airport with a midi keyboard composing a soundtrack... ain't technology amazing!!! - @deannapappas aww what a bummer! well atleast u know u won't miss ur flight ;) so u gonna go see "vegas" - on the plane & in the last row but close 2 bathroom & I have tea + 1 L water! still on time! #C2EA #YAI - Air Can doesn't allow online check-in for Expedia tickets so, combined w train malfunction, am not sitting on aisle for 1st time in ages. - @dannygokey Ok that just stinks!! Where did your luggage travel to...or are they still trying to figure that out? Bummer. - needs to download an upgrade to my blackberry so it can run smoother however, that means I will hve 2 give up an app...ubertwitter - Detroit Rock City. Back in Eastern Standard Time. Can't wait to see my airport picker upper. - J's laptop is in AppleCare and she's on mine this weekend. This would have been a perfect N97 moment. Damn US launch delay. - @mandee_k Flight leaves in 2 hours I'm going to miss Spain (and Joshy!) Will text from T.O. Take care this weekend Mands! talk soon xo - "I GOT PLENTY MONEY" sounds incredible in the club.....Prepping for my flight back to NYC Don't wanna leave... - really wants to camp at download so she can watch flight 666 with her boy - @stevesumpton yes I do agree! Flight was good, got to sleep most of the way! - @lyndalpn Have a nice flight - In the airport again heading to az to see my family...wishing I was still with mike tho - Have arrived in vegas! Our FA on @southwestair sang to us on takeoff! "Fly me to the moon..." it rocked - chilling at will apt. fab. view. close to the golden gate. watching american idol!!. cant believe ADAM!!!. didnt win!. - taking grandma to the airport - going to the airport . going home super excited - @vibzfabz i'd like to but i cant afford the price.mayb i'll go to the airport and stop the team ah, idk. hope you get that ticket as ... 
- Man another vep performance let's get it. My heart goes out to my crew sorry about the family issues I'll cover your work today stay up - Compiling all my music for my new Ipod...since I lost my old one on my flight back to Miami - @klm are you kiddin' I need free wireless on every flight, no magazines - Sigh. Still 4 more stations till my girlfriend Or my girlfriend's plane at least. - @beckyhope okkie thats nice of you becks <3 well, my flight was just about 1 and a half an hour, but i had such a nice steward... - How 'bout what I did? Had FUN playin' w/the children! Swung at baseballs, hit a few,lol. Flew a toy plane and broke it - landed at SeaTac. The baby was much better this leg of the flight, thank goodness. - The Air France missing flight does not look good. - Oh FUCK. the mini usb has stopped taking a charge reliably now. This phone may not last me til mid-july upgrade - @DitaVonTeese La Duree in the Paris airport???? Good to know - Watching CNN news - hope they find that plane! Not looking good though - That was the smoothest Tiger Airways flight I've ever had. Thanks - @billpalmer Yeah, not super-gee wiz. I hope you are right, but even an incremental upgrade would be fine with me - Didn't get a free upgrade on my first flight as an airtran elite miles member Maybe on the way home. Four hour flight here we come! - Can't believe my wallet was stolen on flight - @takisoma if you were my kid you'd be so grounded! - Just purchased my plane tix to the UK... unemployment isn't so bad after all - Dropping @bethlattin off at the airport Then back to work tomorrow.. - Off across the oceans again...hoping for a smooth flight with no lightening please...RIP to those poor folks Tweet ya Thursday - @MistressB So am i let me tell you..not looking forward to 14 hr flight, but it will be worth it - @Jonasbrothers what day r u landing on Spain? My bff @vickyhill and I got a surprise 4 u and demi. Please answer. 
We know you'll love it - Ha ha, 'Flight of the Concords' cracks me up - Monitor at Chicago aiport says my flight to Raliegh, NC is "on time" - pray that it stays that way! - @shaundiviney the male host on my plane thought you were cute lovey - He's leaving to the airport and will be here in approx 13 hours. yippie! yay! - SHIT, have to wake up at 6:30 tomorrow Stupid early flight - @svcotton Indeed they are not - be there shortly // Take a British Midlands flight maybe from Dublin to Grlasgow or Edinb. no worries. - Waiting to board the plane. Frustrated that Safari on my iphone not picking up the wifi providers home page My ASAS eee works fine - @jacqui_cooper The last flight in the Space Shuttle program, and I want to watch the launch. I've never seen one live. - Such a shame about the Air France Flight 447 - still at the airport - On the plane about to take off back to minn. I am really not ready to come back yet - when its my birthday i already have to go to the airport at 05.30 how terrible! - got a flight change, still not looking forward to leaving - so many kids are going to be on this flight T minus 65 minutes - @katyhaggis LOL yep. My flight left around 7:45 -already lost some money too! - Flight of the Conchords and sleep. The Hangover was fun - Pray for those who were in the Air France flight - In Paris june 2,3,4 will spend as much time in the plane than there - Last night in Japan. Philippines flight tomorrow. Maybe our plane will disappear over the Pacific and I can live on the LOST island. - Just arrived in London from L.A, long + boring plane ride...not fun - @citizen27 it has been too long! was thinking nice dinner in t dot - will shoot an email to the crew this week - @MusicIsHealthy right girl, and than the whole west coast lets book a flight we'll meet at the airport see ya - @tommcfly hey tom! hope you have a great flight! can't wait to see you in Chile!!! - @UrBaN_eLySsE ihad always wanted her 2 do an album like that 1st disc. 
iLike Video Phone/Diva/Upgrade U Beyonce, but Halo Beyonce is - @Contra_tss Yeah, but they building a train & there's always some thangs wandering around that airport & the hotel is inside the airport - @matigo Good idea. No hot drinks before landing - And theeeeeeres the turbulence - My baby's leaving on a jet plane. Dont know when i'll see her again... Oh ok i do. Next wednesday. Gonna miss her though - Ok, I made it on the new plane I'm lame. I need to be more spontaneous.. If only I didn't have something important to do tomorrow.. Damn! - @fludwatches yea wiilib beer garden was dope. gotta try astoria one too! i just sent homegirl off to airport my room looks mad empty - Scratch that. It was an accident. 1 hour delay - Waiting on the plane tot take-off. Pilot announced an hour delay http://bit.ly/14UGvR
Dans cette représentation et ce modèle la classe 0 et la classe 1 sont détectées avec le même niveau d'exactitude, par contre il n'est pas plus performant que CountVectorizer.
# Close the active MLflow run so the TF-IDF experiment logging is finalized
mlflow.end_run()
2024/12/06 14:38:28 INFO mlflow.tracking._tracking_service.client: 🏃 View run Pycaret (TfIdf) at: http://localhost:5000/#/experiments/374735653194037029/runs/35dc518b01634137bc9e9effbcaf329c. 2024/12/06 14:38:28 INFO mlflow.tracking._tracking_service.client: 🧪 View experiment at: http://localhost:5000/#/experiments/374735653194037029.
Autosklearn¶
Notebook: P7_Test_AutoSKLearn_Ubuntu_via_WSL.ipynb
Une exploration avec autosklearn depuis un embedding TfIdf a également mené à un classifieur basique « Passive-Aggressive classifier ». La performance est équivalente (accuracy 0.7) à celle d'un modèle de régression logistique ou extra trees, l'intérêt est limité.
Utilisation des solutions sur étagère de Azure¶
Ces outils nécessitent d'avoir un compte Azure (AutoML est payant)
AutoML¶
A partir du dataframe contenant les colonnes "text" (texte brut des tweets) et "target" AutoML détermine le pré-traitement le plus adéquat
Puis teste un ensemble d'algorithmes de classification avec les performances suivantes:
Le modèle est un ensemble combinant différentes régressions logistiques et un modèle linearSVM. L'algorithme de plus grand poids a les paramètres suivants:
Poids de l'ensemble: 0.23076923076923078
Prétraitement:
{
"class_name": "MaxAbsScaler",
"module": "sklearn.preprocessing",
"param_args": [],
"param_kwargs": {},
"prepared_kwargs": {},
"spec_class": "preproc"
}
Algorithme d'entraînement
{
"class_name": "LogisticRegression",
"module": "sklearn.linear_model",
"param_args": [],
"param_kwargs": {
"C": 3237.45754281764,
"class_weight": "balanced",
"multi_class": "ovr",
"penalty": "l1",
"solver": "saga"
},
"prepared_kwargs": {},
"spec_class": "sklearn"
}
Ce modèle a été téléchargé et loggé dans AutoML (nécessite Linux).
Il nous apprend qu'une solution basée sur la régression logistique peut être relativement performante.
Azure Language Analytics¶
Notebook: azure_language_model.ipynb
Une API d'analyse de langage est également proposée par Azure. Ce modèle repose sur des modèles NLP avec des architectures de type transformer optimisés pour comprendre un large éventail de phrases (T-NLG Turing Natural Language Generation).
Une accuracy de 0.63 nous prouve que le problème est loin d'être trivial, d'autant plus que les performances sont très différentes avec une très bonne détection de la classe positive (tweets négatifs) mais beaucoup de faux positifs. Donc un modèle alarmiste.
Modèle économique¶
En considérant la qualité informative apportée par SentimentIntensityAnalyzer de NLTK nous allons combiner cette colonne avec l'analyse par régression logistique.
Tests de régression logistique¶
from sklearn.pipeline import Pipeline
from sklearn.compose import ColumnTransformer
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.preprocessing import MaxAbsScaler
from sklearn.preprocessing import StandardScaler
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split
from sklearn.metrics import classification_report
import pandas as pd

# Reload the three pre-split datasets from disk.
train_df = pd.read_csv('./data/train_df.csv')
val_df = pd.read_csv('./data/val_df.csv')
test_df = pd.read_csv('./data/test_df.csv')

# Features: raw tweet text plus the NLTK SIA sentiment score; label: 'target'.
feature_cols = ['text', 'sia_sentiment']
X_train, y_train = train_df[feature_cols], train_df['target']
X_val, y_val = val_df[feature_cols], val_df['target']
X_test, y_test = test_df[feature_cols], test_df['target']

# Column-wise preprocessing: TF-IDF (uni+bi-grams) on the text column,
# standardize-then-maxabs scaling on the numeric sentiment score.
preprocessor = ColumnTransformer(transformers=[
    ('tfidf',
     TfidfVectorizer(min_df=2, max_df=0.5, ngram_range=(1, 2)),
     'text'),
    ('scaler',
     Pipeline([('standard', StandardScaler()), ('maxabs', MaxAbsScaler())]),
     ['sia_sentiment']),
])

# Full pipeline: preprocessing followed by a logistic-regression classifier.
pipeline = Pipeline([
    ('preprocessor', preprocessor),
    ('classifier', LogisticRegression()),
])
pipeline.fit(X_train, y_train)

# Evaluate on the validation split, then on the held-out test split.
y_val_pred = pipeline.predict(X_val)
print("Validation Set Performance:\n", classification_report(y_val, y_val_pred))
y_test_pred = pipeline.predict(X_test)
print("Test Set Performance:\n", classification_report(y_test, y_test_pred))
Validation Set Performance:
precision recall f1-score support
0 0.71 0.74 0.73 726
1 0.73 0.70 0.72 730
accuracy 0.72 1456
macro avg 0.72 0.72 0.72 1456
weighted avg 0.72 0.72 0.72 1456
Test Set Performance:
precision recall f1-score support
0 0.74 0.75 0.75 807
1 0.75 0.74 0.75 810
accuracy 0.75 1617
macro avg 0.75 0.75 0.75 1617
weighted avg 0.75 0.75 0.75 1617
L'exactitude est au niveau des meilleurs résultats obtenus jusqu'ici. On peut optimiser de façon automatique par rapport à une métrique.
# ! pip install optuna
# Hyper-parameter search optimizing for accuracy
import optuna
from sklearn.pipeline import Pipeline
from sklearn.compose import ColumnTransformer
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.preprocessing import StandardScaler, MaxAbsScaler
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import accuracy_score
# Reload the train/validation splits from disk (test set stays held out here)
train_df = pd.read_csv('./data/train_df.csv')
val_df = pd.read_csv('./data/val_df.csv')
# Features (raw text + NLTK SIA sentiment score) and binary labels per split
X_train = train_df[['text', 'sia_sentiment']]
y_train = train_df['target']
X_val = val_df[['text', 'sia_sentiment']]
y_val = val_df['target']
# Objective function for Optuna: fit the pipeline with trial-sampled
# hyper-parameters and return validation accuracy (to be maximized).
def objective(trial):
    """Train the TF-IDF + logistic-regression pipeline for one Optuna trial.

    Samples the regularization strength C (log scale) and a compatible
    (solver, penalty) pair, fits on the training split, and returns the
    accuracy on the validation split.
    """
    # trial.suggest_loguniform is deprecated since Optuna 3.0; the
    # equivalent (same distribution, same "C" key) is suggest_float(log=True).
    C = trial.suggest_float("C", 1e-4, 1e4, log=True)
    # Sample solver and penalty jointly so only valid combinations are tried
    # (lbfgs supports only l2; elasticnet requires saga).
    # NOTE(review): tuple choices trigger an Optuna categorical-type warning
    # (choices should be str/int/float/bool/None); kept as-is to preserve the
    # stored parameter format in study.best_params.
    solver_penalty = trial.suggest_categorical(
        "solver_penalty",
        [
            ("lbfgs", "l2"),
            ("liblinear", "l1"),
            ("liblinear", "l2"),
            ("saga", "l1"),
            ("saga", "l2"),
            ("saga", "elasticnet")
        ]
    )
    solver, penalty = solver_penalty
    l1_ratio = None
    if penalty == "elasticnet":
        # l1/l2 mixing ratio, only meaningful for the elasticnet penalty
        l1_ratio = trial.suggest_float("l1_ratio", 0.0, 1.0)
    # Preprocessor: identical to the baseline run (TF-IDF + scaled SIA score)
    preprocessor = ColumnTransformer(
        transformers=[
            ('tfidf', TfidfVectorizer(min_df=2, max_df=0.5, ngram_range=(1, 2)), 'text'),
            ('scaler', Pipeline([('standard', StandardScaler()), ('maxabs', MaxAbsScaler())]), ['sia_sentiment'])
        ]
    )
    # Pipeline with the trial's logistic-regression configuration
    classifier = LogisticRegression(
        C=C, solver=solver, penalty=penalty, l1_ratio=l1_ratio, max_iter=200, random_state=42
    )
    pipeline = Pipeline([
        ('preprocessor', preprocessor),
        ('classifier', classifier)
    ])
    # Fit on the training split
    pipeline.fit(X_train, y_train)
    # Score on the validation split; Optuna maximizes this value
    y_val_pred = pipeline.predict(X_val)
    accuracy = accuracy_score(y_val, y_val_pred)
    return accuracy
# Create the Optuna study and run the search (maximize validation accuracy)
study = optuna.create_study(direction="maximize")
study.optimize(objective, n_trials=100)
# Report the best hyper-parameters found and the corresponding accuracy
print("Best hyperparameters:", study.best_params)
print("Best validation accuracy:", study.best_value)
[I 2024-12-07 10:31:54,697] A new study created in memory with name: no-name-3e0aed75-acfe-4a69-b0e2-aea23cc5dacb
[I 2024-12-07 10:31:54,889] Trial 0 finished with value: 0.7060439560439561 and parameters: {'C': 48.3065114957649, 'solver_penalty': ('liblinear', 'l1')}. Best is trial 0 with value: 0.7060439560439561.
[I 2024-12-07 10:32:04,424] Trial 1 finished with value: 0.7108516483516484 and parameters: {'C': 200.56080735948737, 'solver_penalty': ('saga', 'l1')}. Best is trial 1 with value: 0.7108516483516484.
[I 2024-12-07 10:32:04,718] Trial 2 finished with value: 0.7067307692307693 and parameters: {'C': 637.8301427026097, 'solver_penalty': ('saga', 'l2')}. Best is trial 1 with value: 0.7108516483516484.
[I 2024-12-07 10:32:04,859] Trial 3 finished with value: 0.49862637362637363 and parameters: {'C': 0.0001027609324678321, 'solver_penalty': ('liblinear', 'l1')}. Best is trial 1 with value: 0.7108516483516484.
[I 2024-12-07 10:32:04,992] Trial 4 finished with value: 0.6428571428571429 and parameters: {'C': 0.01391284440660145, 'solver_penalty': ('saga', 'l2')}. Best is trial 1 with value: 0.7108516483516484.
[I 2024-12-07 10:32:05,158] Trial 5 finished with value: 0.7087912087912088 and parameters: {'C': 512.987397672897, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 1 with value: 0.7108516483516484.
[I 2024-12-07 10:32:05,289] Trial 6 finished with value: 0.7273351648351648 and parameters: {'C': 2.076658252028446, 'solver_penalty': ('liblinear', 'l1')}. Best is trial 6 with value: 0.7273351648351648.
[I 2024-12-07 10:32:05,414] Trial 7 finished with value: 0.6407967032967034 and parameters: {'C': 0.0031912851774541107, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 6 with value: 0.7273351648351648.
[I 2024-12-07 10:32:05,539] Trial 8 finished with value: 0.6407967032967034 and parameters: {'C': 0.00028667228489203566, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 6 with value: 0.7273351648351648.
[I 2024-12-07 10:32:05,706] Trial 9 finished with value: 0.6559065934065934 and parameters: {'C': 0.18401762285747889, 'solver_penalty': ('saga', 'elasticnet'), 'l1_ratio': 0.27440111958114355}. Best is trial 6 with value: 0.7273351648351648.
[I 2024-12-07 10:32:05,862] Trial 10 finished with value: 0.7211538461538461 and parameters: {'C': 3.8546543537613616, 'solver_penalty': ('liblinear', 'l1')}. Best is trial 6 with value: 0.7273351648351648.
[I 2024-12-07 10:32:06,007] Trial 11 finished with value: 0.7177197802197802 and parameters: {'C': 3.0216717439987075, 'solver_penalty': ('liblinear', 'l1')}. Best is trial 6 with value: 0.7273351648351648.
[I 2024-12-07 10:32:06,153] Trial 12 finished with value: 0.7211538461538461 and parameters: {'C': 3.774638160211811, 'solver_penalty': ('liblinear', 'l1')}. Best is trial 6 with value: 0.7273351648351648.
[I 2024-12-07 10:32:06,291] Trial 13 finished with value: 0.6565934065934066 and parameters: {'C': 0.2452845662702841, 'solver_penalty': ('liblinear', 'l1')}. Best is trial 6 with value: 0.7273351648351648.
[I 2024-12-07 10:32:11,788] Trial 14 finished with value: 0.7184065934065934 and parameters: {'C': 37.64471669591877, 'solver_penalty': ('saga', 'elasticnet'), 'l1_ratio': 0.9991700992252912}. Best is trial 6 with value: 0.7273351648351648.
[I 2024-12-07 10:32:11,958] Trial 15 finished with value: 0.7039835164835165 and parameters: {'C': 3244.703564831607, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 6 with value: 0.7273351648351648.
[I 2024-12-07 10:32:12,327] Trial 16 finished with value: 0.6991758241758241 and parameters: {'C': 0.5083913943056121, 'solver_penalty': ('saga', 'l1')}. Best is trial 6 with value: 0.7273351648351648.
[I 2024-12-07 10:32:12,458] Trial 17 finished with value: 0.6394230769230769 and parameters: {'C': 0.010819200946400645, 'solver_penalty': ('liblinear', 'l1')}. Best is trial 6 with value: 0.7273351648351648.
[I 2024-12-07 10:32:12,629] Trial 18 finished with value: 0.7135989010989011 and parameters: {'C': 9.292646772527176, 'solver_penalty': ('liblinear', 'l1')}. Best is trial 6 with value: 0.7273351648351648.
[I 2024-12-07 10:32:12,758] Trial 19 finished with value: 0.6394230769230769 and parameters: {'C': 0.06122944202323934, 'solver_penalty': ('liblinear', 'l1')}. Best is trial 6 with value: 0.7273351648351648.
[I 2024-12-07 10:32:13,249] Trial 20 finished with value: 0.720467032967033 and parameters: {'C': 1.0526201821615948, 'solver_penalty': ('saga', 'l1')}. Best is trial 6 with value: 0.7273351648351648.
[I 2024-12-07 10:32:13,439] Trial 21 finished with value: 0.7081043956043956 and parameters: {'C': 13.255791377816829, 'solver_penalty': ('liblinear', 'l1')}. Best is trial 6 with value: 0.7273351648351648.
[I 2024-12-07 10:32:13,588] Trial 22 finished with value: 0.7245879120879121 and parameters: {'C': 2.7118633406490917, 'solver_penalty': ('liblinear', 'l1')}. Best is trial 6 with value: 0.7273351648351648.
[I 2024-12-07 10:32:13,737] Trial 23 finished with value: 0.7273351648351648 and parameters: {'C': 2.4628192051970528, 'solver_penalty': ('liblinear', 'l1')}. Best is trial 6 with value: 0.7273351648351648.
[I 2024-12-07 10:32:13,881] Trial 24 finished with value: 0.720467032967033 and parameters: {'C': 1.052890400993182, 'solver_penalty': ('liblinear', 'l1')}. Best is trial 6 with value: 0.7273351648351648.
[I 2024-12-07 10:32:14,050] Trial 25 finished with value: 0.6428571428571429 and parameters: {'C': 0.04709348643934221, 'solver_penalty': ('saga', 'elasticnet'), 'l1_ratio': 0.06376653367660612}. Best is trial 6 with value: 0.7273351648351648.
[I 2024-12-07 10:32:14,184] Trial 26 finished with value: 0.7135989010989011 and parameters: {'C': 54.32036650815599, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 6 with value: 0.7273351648351648.
[I 2024-12-07 10:32:14,390] Trial 27 finished with value: 0.7239010989010989 and parameters: {'C': 16.029227139761883, 'solver_penalty': ('saga', 'l2')}. Best is trial 6 with value: 0.7273351648351648.
[I 2024-12-07 10:32:14,527] Trial 28 finished with value: 0.6723901098901099 and parameters: {'C': 0.1290471658068941, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 6 with value: 0.7273351648351648.
[I 2024-12-07 10:32:14,681] Trial 29 finished with value: 0.7005494505494505 and parameters: {'C': 108.95450321535883, 'solver_penalty': ('liblinear', 'l1')}. Best is trial 6 with value: 0.7273351648351648.
[I 2024-12-07 10:32:14,817] Trial 30 finished with value: 0.728021978021978 and parameters: {'C': 1.5652726000962356, 'solver_penalty': ('liblinear', 'l1')}. Best is trial 30 with value: 0.728021978021978.
[I 2024-12-07 10:32:14,959] Trial 31 finished with value: 0.7273351648351648 and parameters: {'C': 1.4407985526147986, 'solver_penalty': ('liblinear', 'l1')}. Best is trial 30 with value: 0.728021978021978.
[I 2024-12-07 10:32:15,096] Trial 32 finished with value: 0.7211538461538461 and parameters: {'C': 0.7198982599267522, 'solver_penalty': ('liblinear', 'l1')}. Best is trial 30 with value: 0.728021978021978.
[I 2024-12-07 10:32:15,262] Trial 33 finished with value: 0.7101648351648352 and parameters: {'C': 10.362738508312562, 'solver_penalty': ('liblinear', 'l1')}. Best is trial 30 with value: 0.728021978021978.
[I 2024-12-07 10:32:15,403] Trial 34 finished with value: 0.720467032967033 and parameters: {'C': 1.1806033676865633, 'solver_penalty': ('liblinear', 'l1')}. Best is trial 30 with value: 0.728021978021978.
[I 2024-12-07 10:32:15,680] Trial 35 finished with value: 0.720467032967033 and parameters: {'C': 36.18325105850731, 'solver_penalty': ('saga', 'l2')}. Best is trial 30 with value: 0.728021978021978.
[I 2024-12-07 10:32:15,827] Trial 36 finished with value: 0.7060439560439561 and parameters: {'C': 287.9319886327543, 'solver_penalty': ('liblinear', 'l1')}. Best is trial 30 with value: 0.728021978021978.
[I 2024-12-07 10:32:16,165] Trial 37 finished with value: 0.6717032967032966 and parameters: {'C': 0.3509621676470495, 'solver_penalty': ('saga', 'l1')}. Best is trial 30 with value: 0.728021978021978.
[I 2024-12-07 10:32:16,285] Trial 38 finished with value: 0.6435439560439561 and parameters: {'C': 0.0252723814490392, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 30 with value: 0.728021978021978.
[I 2024-12-07 10:32:16,405] Trial 39 finished with value: 0.6394230769230769 and parameters: {'C': 0.006273139895109939, 'solver_penalty': ('liblinear', 'l1')}. Best is trial 30 with value: 0.728021978021978.
[I 2024-12-07 10:32:16,583] Trial 40 finished with value: 0.7053571428571429 and parameters: {'C': 2465.2735416667483, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 30 with value: 0.728021978021978.
[I 2024-12-07 10:32:16,723] Trial 41 finished with value: 0.7177197802197802 and parameters: {'C': 3.054115916331515, 'solver_penalty': ('liblinear', 'l1')}. Best is trial 30 with value: 0.728021978021978.
[I 2024-12-07 10:32:16,850] Trial 42 finished with value: 0.7300824175824175 and parameters: {'C': 1.7830920096590845, 'solver_penalty': ('liblinear', 'l1')}. Best is trial 42 with value: 0.7300824175824175.
[I 2024-12-07 10:32:16,996] Trial 43 finished with value: 0.7129120879120879 and parameters: {'C': 5.399197240152295, 'solver_penalty': ('liblinear', 'l1')}. Best is trial 42 with value: 0.7300824175824175.
[I 2024-12-07 10:32:17,145] Trial 44 finished with value: 0.720467032967033 and parameters: {'C': 1.6130538653092246, 'solver_penalty': ('saga', 'l2')}. Best is trial 42 with value: 0.7300824175824175.
[I 2024-12-07 10:32:17,275] Trial 45 finished with value: 0.6394230769230769 and parameters: {'C': 0.11259082171723664, 'solver_penalty': ('liblinear', 'l1')}. Best is trial 42 with value: 0.7300824175824175.
[I 2024-12-07 10:32:17,401] Trial 46 finished with value: 0.49862637362637363 and parameters: {'C': 0.0015838309002691436, 'solver_penalty': ('liblinear', 'l1')}. Best is trial 42 with value: 0.7300824175824175.
[I 2024-12-07 10:32:17,553] Trial 47 finished with value: 0.6538461538461539 and parameters: {'C': 0.23598237506476527, 'solver_penalty': ('saga', 'elasticnet'), 'l1_ratio': 0.7941477491165135}. Best is trial 42 with value: 0.7300824175824175.
[I 2024-12-07 10:32:17,688] Trial 48 finished with value: 0.6978021978021978 and parameters: {'C': 0.4996502357945077, 'solver_penalty': ('liblinear', 'l1')}. Best is trial 42 with value: 0.7300824175824175.
[I 2024-12-07 10:32:17,819] Trial 49 finished with value: 0.7287087912087912 and parameters: {'C': 5.499395357433505, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 42 with value: 0.7300824175824175.
[I 2024-12-07 10:32:17,960] Trial 50 finished with value: 0.7293956043956044 and parameters: {'C': 7.273785617991492, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 42 with value: 0.7300824175824175.
[I 2024-12-07 10:32:18,094] Trial 51 finished with value: 0.7293956043956044 and parameters: {'C': 7.295365266460295, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 42 with value: 0.7300824175824175.
[I 2024-12-07 10:32:18,227] Trial 52 finished with value: 0.7211538461538461 and parameters: {'C': 28.561517497443077, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 42 with value: 0.7300824175824175.
[I 2024-12-07 10:32:18,355] Trial 53 finished with value: 0.7287087912087912 and parameters: {'C': 6.969677502437183, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 42 with value: 0.7300824175824175.
[I 2024-12-07 10:32:18,482] Trial 54 finished with value: 0.7293956043956044 and parameters: {'C': 6.4773691412622485, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 42 with value: 0.7300824175824175.
[I 2024-12-07 10:32:18,608] Trial 55 finished with value: 0.7321428571428571 and parameters: {'C': 7.900363832371767, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 55 with value: 0.7321428571428571.
[I 2024-12-07 10:32:18,745] Trial 56 finished with value: 0.7101648351648352 and parameters: {'C': 118.33962151604503, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 55 with value: 0.7321428571428571.
[I 2024-12-07 10:32:18,887] Trial 57 finished with value: 0.7232142857142857 and parameters: {'C': 18.789943582073427, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 55 with value: 0.7321428571428571.
[I 2024-12-07 10:32:19,027] Trial 58 finished with value: 0.7108516483516484 and parameters: {'C': 68.094618134418, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 55 with value: 0.7321428571428571.
[I 2024-12-07 10:32:19,165] Trial 59 finished with value: 0.7287087912087912 and parameters: {'C': 5.2839952401472114, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 55 with value: 0.7321428571428571.
[I 2024-12-07 10:32:19,342] Trial 60 finished with value: 0.7060439560439561 and parameters: {'C': 416.33884224487764, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 55 with value: 0.7321428571428571.
[I 2024-12-07 10:32:19,478] Trial 61 finished with value: 0.7293956043956044 and parameters: {'C': 7.4316363006040715, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 55 with value: 0.7321428571428571.
[I 2024-12-07 10:32:19,616] Trial 62 finished with value: 0.7232142857142857 and parameters: {'C': 18.863468248377256, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 55 with value: 0.7321428571428571.
[I 2024-12-07 10:32:19,748] Trial 63 finished with value: 0.7287087912087912 and parameters: {'C': 7.099188816139169, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 55 with value: 0.7321428571428571.
[I 2024-12-07 10:32:19,881] Trial 64 finished with value: 0.720467032967033 and parameters: {'C': 3.7249391118591695, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 55 with value: 0.7321428571428571.
[I 2024-12-07 10:32:20,011] Trial 65 finished with value: 0.7211538461538461 and parameters: {'C': 25.682571952786486, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 55 with value: 0.7321428571428571.
[I 2024-12-07 10:32:20,142] Trial 66 finished with value: 0.7252747252747253 and parameters: {'C': 10.825439071272548, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 55 with value: 0.7321428571428571.
[I 2024-12-07 10:32:20,273] Trial 67 finished with value: 0.709478021978022 and parameters: {'C': 88.35955640861357, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 55 with value: 0.7321428571428571.
[I 2024-12-07 10:32:20,400] Trial 68 finished with value: 0.7142857142857143 and parameters: {'C': 0.6829040306220402, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 55 with value: 0.7321428571428571.
[I 2024-12-07 10:32:20,530] Trial 69 finished with value: 0.720467032967033 and parameters: {'C': 42.65318675208944, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 55 with value: 0.7321428571428571.
[I 2024-12-07 10:32:32,385] Trial 70 finished with value: 0.7074175824175825 and parameters: {'C': 1205.5420110316056, 'solver_penalty': ('saga', 'l1')}. Best is trial 55 with value: 0.7321428571428571.
[I 2024-12-07 10:32:32,520] Trial 71 finished with value: 0.7307692307692307 and parameters: {'C': 7.6714315535333055, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 55 with value: 0.7321428571428571.
[I 2024-12-07 10:32:32,647] Trial 72 finished with value: 0.7252747252747253 and parameters: {'C': 2.1975217216904297, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 55 with value: 0.7321428571428571.
[I 2024-12-07 10:32:32,776] Trial 73 finished with value: 0.7239010989010989 and parameters: {'C': 11.244189178763294, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 55 with value: 0.7321428571428571.
[I 2024-12-07 10:32:32,901] Trial 74 finished with value: 0.7259615384615384 and parameters: {'C': 4.771205522738084, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 55 with value: 0.7321428571428571.
[I 2024-12-07 10:32:33,028] Trial 75 finished with value: 0.7328296703296703 and parameters: {'C': 7.9564999785275266, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 75 with value: 0.7328296703296703.
[I 2024-12-07 10:32:33,194] Trial 76 finished with value: 0.7232142857142857 and parameters: {'C': 17.77178952997677, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 75 with value: 0.7328296703296703.
[I 2024-12-07 10:32:34,764] Trial 77 finished with value: 0.7225274725274725 and parameters: {'C': 3.374479858021773, 'solver_penalty': ('saga', 'elasticnet'), 'l1_ratio': 0.5505041115807141}. Best is trial 75 with value: 0.7328296703296703.
[I 2024-12-07 10:32:34,909] Trial 78 finished with value: 0.7074175824175825 and parameters: {'C': 185.68078949229965, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 75 with value: 0.7328296703296703.
[I 2024-12-07 10:32:35,039] Trial 79 finished with value: 0.7239010989010989 and parameters: {'C': 2.0804975185593055, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 75 with value: 0.7328296703296703.
[I 2024-12-07 10:32:35,167] Trial 80 finished with value: 0.7314560439560439 and parameters: {'C': 9.010805253370354, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 75 with value: 0.7328296703296703.
[I 2024-12-07 10:32:35,301] Trial 81 finished with value: 0.7321428571428571 and parameters: {'C': 8.253122671997595, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 75 with value: 0.7328296703296703.
[I 2024-12-07 10:32:35,431] Trial 82 finished with value: 0.7211538461538461 and parameters: {'C': 26.09427439983369, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 75 with value: 0.7328296703296703.
[I 2024-12-07 10:32:35,623] Trial 83 finished with value: 0.7211538461538461 and parameters: {'C': 12.587045145421937, 'solver_penalty': ('saga', 'l2')}. Best is trial 75 with value: 0.7328296703296703.
[I 2024-12-07 10:32:35,748] Trial 84 finished with value: 0.7142857142857143 and parameters: {'C': 0.8388161988432664, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 75 with value: 0.7328296703296703.
[I 2024-12-07 10:32:35,882] Trial 85 finished with value: 0.7115384615384616 and parameters: {'C': 59.810930429639335, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 75 with value: 0.7328296703296703.
[I 2024-12-07 10:32:36,016] Trial 86 finished with value: 0.7321428571428571 and parameters: {'C': 8.312113726819074, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 75 with value: 0.7328296703296703.
[I 2024-12-07 10:32:36,159] Trial 87 finished with value: 0.7225274725274725 and parameters: {'C': 1.606623285820615, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 75 with value: 0.7328296703296703.
[I 2024-12-07 10:32:37,685] Trial 88 finished with value: 0.7218406593406593 and parameters: {'C': 3.913288746872534, 'solver_penalty': ('saga', 'l1')}. Best is trial 75 with value: 0.7328296703296703.
[I 2024-12-07 10:32:37,810] Trial 89 finished with value: 0.7046703296703297 and parameters: {'C': 0.4786775631313084, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 75 with value: 0.7328296703296703.
[I 2024-12-07 10:32:45,100] Trial 90 finished with value: 0.7239010989010989 and parameters: {'C': 15.965475049142517, 'solver_penalty': ('saga', 'elasticnet'), 'l1_ratio': 0.49462346006163}. Best is trial 75 with value: 0.7328296703296703.
[I 2024-12-07 10:32:45,228] Trial 91 finished with value: 0.7321428571428571 and parameters: {'C': 7.995948978393949, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 75 with value: 0.7328296703296703.
[I 2024-12-07 10:32:45,356] Trial 92 finished with value: 0.7300824175824175 and parameters: {'C': 9.391769337954077, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 75 with value: 0.7328296703296703.
[I 2024-12-07 10:32:45,487] Trial 93 finished with value: 0.7218406593406593 and parameters: {'C': 35.895995494414954, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 75 with value: 0.7328296703296703.
[I 2024-12-07 10:32:45,613] Trial 94 finished with value: 0.7287087912087912 and parameters: {'C': 9.7891862752946, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 75 with value: 0.7328296703296703.
[I 2024-12-07 10:32:45,737] Trial 95 finished with value: 0.7252747252747253 and parameters: {'C': 3.140755020443195, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 75 with value: 0.7328296703296703.
[I 2024-12-07 10:32:45,860] Trial 96 finished with value: 0.7232142857142857 and parameters: {'C': 1.3479796666865314, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 75 with value: 0.7328296703296703.
[I 2024-12-07 10:32:46,101] Trial 97 finished with value: 0.7211538461538461 and parameters: {'C': 26.72823446094823, 'solver_penalty': ('saga', 'l2')}. Best is trial 75 with value: 0.7328296703296703.
[I 2024-12-07 10:32:46,229] Trial 98 finished with value: 0.7314560439560439 and parameters: {'C': 8.91685632112205, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 75 with value: 0.7328296703296703.
[I 2024-12-07 10:32:46,356] Trial 99 finished with value: 0.7252747252747253 and parameters: {'C': 2.170719105702549, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 75 with value: 0.7328296703296703.
Best hyperparameters: {'C': 7.9564999785275266, 'solver_penalty': ('liblinear', 'l2')}
Best validation accuracy: 0.7328296703296703
# # Alternative priorité au recall — ATTENTION : en maximisant uniquement le recall, la recherche Optuna
# # peut dériver vers un modèle trivial "DUMMY" qui prédit toujours la classe positive (recall = 1.0 sans
# # aucune valeur prédictive ; cf. Pycaret, et le trial 9 ci-dessous qui atteint 1.0 avec un C quasi nul).
# from sklearn.metrics import recall_score
# # Définir la fonction d'objectif pour Optuna avec le recall comme métrique
# def objective(trial):
# # Hyperparamètres à optimiser
# C = trial.suggest_float("C", 1e-4, 1e4, log=True) # Paramètre de régularisation (suggest_loguniform est déprécié depuis Optuna 3.0)
# solver_penalty = trial.suggest_categorical(
# "solver_penalty",
# [
# ("lbfgs", "l2"),
# ("liblinear", "l1"),
# ("liblinear", "l2"),
# ("saga", "l1"),
# ("saga", "l2"),
# ("saga", "elasticnet")
# ]
# )
# solver, penalty = solver_penalty
# l1_ratio = None
# if penalty == "elasticnet":
# l1_ratio = trial.suggest_float("l1_ratio", 0.0, 1.0)
# # Préprocesseur : reste inchangé
# preprocessor = ColumnTransformer(
# transformers=[
# ('tfidf', TfidfVectorizer(min_df=2, max_df=0.5, ngram_range=(1, 2)), 'text'),
# ('scaler', Pipeline([('standard', StandardScaler()), ('maxabs', MaxAbsScaler())]), ['sia_sentiment'])
# ]
# )
# # Pipeline avec la régression logistique
# classifier = LogisticRegression(
# C=C, solver=solver, penalty=penalty, l1_ratio=l1_ratio, max_iter=200, random_state=42
# )
# pipeline = Pipeline([
# ('preprocessor', preprocessor),
# ('classifier', classifier)
# ])
# # Entraîner le pipeline
# pipeline.fit(X_train, y_train)
# # Évaluer les performances sur l'ensemble de validation
# y_val_pred = pipeline.predict(X_val)
# recall = recall_score(y_val, y_val_pred, average='binary') # 'binary' pour les problèmes binaires
# return recall
# # Créer l’étude Optuna
# study = optuna.create_study(direction="maximize")
# study.optimize(objective, n_trials=100)
# # Résultats
# print("Best hyperparameters:", study.best_params)
# print("Best validation recall:", study.best_value)
[I 2024-12-07 10:30:18,798] A new study created in memory with name: no-name-cab3b0d7-ad64-48a9-b632-7cf02fb6d2d7
[I 2024-12-07 10:30:18,936] Trial 0 finished with value: 0.7109589041095891 and parameters: {'C': 0.009571104218172161, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 0 with value: 0.7109589041095891.
[I 2024-12-07 10:30:19,085] Trial 1 finished with value: 0.7150684931506849 and parameters: {'C': 0.06415273246000311, 'solver_penalty': ('saga', 'l2')}. Best is trial 1 with value: 0.7150684931506849.
[I 2024-12-07 10:30:19,213] Trial 2 finished with value: 0.7109589041095891 and parameters: {'C': 0.028039727193782542, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 1 with value: 0.7150684931506849.
[I 2024-12-07 10:30:19,351] Trial 3 finished with value: 0.6726027397260274 and parameters: {'C': 2020.0322961341367, 'solver_penalty': ('liblinear', 'l1')}. Best is trial 1 with value: 0.7150684931506849.
[I 2024-12-07 10:30:23,683] Trial 4 finished with value: 0.6821917808219178 and parameters: {'C': 22.50247958653081, 'solver_penalty': ('saga', 'l1')}. Best is trial 1 with value: 0.7150684931506849.
[I 2024-12-07 10:30:23,819] Trial 5 finished with value: 0.6767123287671233 and parameters: {'C': 95.54808524812915, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 1 with value: 0.7150684931506849.
[I 2024-12-07 10:30:23,940] Trial 6 finished with value: 0.0 and parameters: {'C': 0.00020146296967088118, 'solver_penalty': ('saga', 'elasticnet'), 'l1_ratio': 0.9851950379586942}. Best is trial 1 with value: 0.7150684931506849.
[I 2024-12-07 10:30:24,232] Trial 7 finished with value: 0.6767123287671233 and parameters: {'C': 3742.9659887042912, 'solver_penalty': ('saga', 'l2')}. Best is trial 1 with value: 0.7150684931506849.
[I 2024-12-07 10:30:24,373] Trial 8 finished with value: 0.7082191780821918 and parameters: {'C': 0.003447988769695682, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 1 with value: 0.7150684931506849.
[I 2024-12-07 10:30:24,508] Trial 9 finished with value: 1.0 and parameters: {'C': 0.0024687139974742802, 'solver_penalty': ('saga', 'l1')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:25,148] Trial 10 finished with value: 0.7109589041095891 and parameters: {'C': 1.636329066906484, 'solver_penalty': ('saga', 'l1')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:25,287] Trial 11 finished with value: 0.7123287671232876 and parameters: {'C': 0.17002250581542477, 'solver_penalty': ('saga', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:25,438] Trial 12 finished with value: 0.7397260273972602 and parameters: {'C': 0.00011670429851802753, 'solver_penalty': ('saga', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:25,558] Trial 13 finished with value: 0.0 and parameters: {'C': 0.00011775588307891899, 'solver_penalty': ('saga', 'l1')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:25,689] Trial 14 finished with value: 0.0 and parameters: {'C': 0.0004857824169839984, 'solver_penalty': ('liblinear', 'l1')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:25,827] Trial 15 finished with value: 0.7054794520547946 and parameters: {'C': 0.002428434938922269, 'solver_penalty': ('saga', 'elasticnet'), 'l1_ratio': 0.0672468612167858}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:25,952] Trial 16 finished with value: 0.7123287671232876 and parameters: {'C': 0.0011318633070241063, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:26,097] Trial 17 finished with value: 0.6958904109589041 and parameters: {'C': 2.227904467814352, 'solver_penalty': ('saga', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:26,432] Trial 18 finished with value: 0.7191780821917808 and parameters: {'C': 0.34571184897149765, 'solver_penalty': ('saga', 'l1')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:26,566] Trial 19 finished with value: 0.7109589041095891 and parameters: {'C': 0.010354047324899848, 'solver_penalty': ('saga', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:26,690] Trial 20 finished with value: 0.0 and parameters: {'C': 0.0001045622997685276, 'solver_penalty': ('saga', 'l1')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:27,005] Trial 21 finished with value: 0.7136986301369863 and parameters: {'C': 0.31742183570977245, 'solver_penalty': ('saga', 'l1')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:31,237] Trial 22 finished with value: 0.678082191780822 and parameters: {'C': 21.652405812022675, 'solver_penalty': ('saga', 'l1')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:31,358] Trial 23 finished with value: 0.0 and parameters: {'C': 0.0011891863353576185, 'solver_penalty': ('saga', 'l1')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:31,491] Trial 24 finished with value: 0.7041095890410959 and parameters: {'C': 0.014550372343297343, 'solver_penalty': ('saga', 'l1')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:31,616] Trial 25 finished with value: 0.7178082191780822 and parameters: {'C': 0.0005155767145339538, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:31,744] Trial 26 finished with value: 0.7205479452054795 and parameters: {'C': 0.36291968833151195, 'solver_penalty': ('liblinear', 'l1')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:31,870] Trial 27 finished with value: 0.7027397260273973 and parameters: {'C': 0.06366669436055868, 'solver_penalty': ('liblinear', 'l1')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:32,021] Trial 28 finished with value: 0.6671232876712329 and parameters: {'C': 6.792739397023951, 'solver_penalty': ('liblinear', 'l1')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:32,169] Trial 29 finished with value: 0.6671232876712329 and parameters: {'C': 456.10257951912087, 'solver_penalty': ('liblinear', 'l1')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:32,318] Trial 30 finished with value: 0.7109589041095891 and parameters: {'C': 0.003532157403077716, 'solver_penalty': ('saga', 'elasticnet'), 'l1_ratio': 0.6530470821688465}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:32,444] Trial 31 finished with value: 0.6972602739726027 and parameters: {'C': 0.43221868704778343, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:32,584] Trial 32 finished with value: 0.7150684931506849 and parameters: {'C': 0.06758946656041528, 'solver_penalty': ('saga', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:32,710] Trial 33 finished with value: 0.7027397260273973 and parameters: {'C': 0.03280354113079415, 'solver_penalty': ('liblinear', 'l1')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:33,123] Trial 34 finished with value: 0.736986301369863 and parameters: {'C': 0.6995803425707908, 'solver_penalty': ('saga', 'l1')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:33,290] Trial 35 finished with value: 0.6958904109589041 and parameters: {'C': 6.637678988777351, 'solver_penalty': ('saga', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:33,417] Trial 36 finished with value: 0.7205479452054795 and parameters: {'C': 0.0003506453127589334, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:33,564] Trial 37 finished with value: 0.6602739726027397 and parameters: {'C': 167.15846019988948, 'solver_penalty': ('liblinear', 'l1')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:34,051] Trial 38 finished with value: 0.7054794520547946 and parameters: {'C': 1.1561283669386717, 'solver_penalty': ('saga', 'l1')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:34,191] Trial 39 finished with value: 0.6931506849315069 and parameters: {'C': 5.01106392418053, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:34,447] Trial 40 finished with value: 0.7164383561643836 and parameters: {'C': 0.120497647565879, 'solver_penalty': ('saga', 'elasticnet'), 'l1_ratio': 0.06316101158600895}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:34,580] Trial 41 finished with value: 0.7232876712328767 and parameters: {'C': 0.0002674009125108973, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:34,711] Trial 42 finished with value: 0.7123287671232876 and parameters: {'C': 0.0010857775613465446, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:34,848] Trial 43 finished with value: 0.7109589041095891 and parameters: {'C': 0.004750386493554755, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:34,983] Trial 44 finished with value: 0.7273972602739726 and parameters: {'C': 0.0002442055457616397, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:35,114] Trial 45 finished with value: 0.7315068493150685 and parameters: {'C': 0.00021943674349641712, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:35,241] Trial 46 finished with value: 0.7123287671232876 and parameters: {'C': 0.0007357537969444025, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:35,371] Trial 47 finished with value: 0.7397260273972602 and parameters: {'C': 0.0001416069250561768, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:35,656] Trial 48 finished with value: 0.6767123287671233 and parameters: {'C': 9564.674126670865, 'solver_penalty': ('saga', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:35,795] Trial 49 finished with value: 0.7534246575342466 and parameters: {'C': 0.0001093517622746785, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:35,921] Trial 50 finished with value: 1.0 and parameters: {'C': 0.0021131983932848168, 'solver_penalty': ('saga', 'l1')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:36,042] Trial 51 finished with value: 0.0 and parameters: {'C': 0.001476963570819977, 'solver_penalty': ('saga', 'l1')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:36,167] Trial 52 finished with value: 0.0 and parameters: {'C': 0.00015011481177774175, 'solver_penalty': ('saga', 'l1')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:36,308] Trial 53 finished with value: 0.7041095890410959 and parameters: {'C': 0.005430472296524487, 'solver_penalty': ('saga', 'l1')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:36,441] Trial 54 finished with value: 0.0 and parameters: {'C': 0.00011156741192898712, 'solver_penalty': ('saga', 'l1')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:36,576] Trial 55 finished with value: 0.7109589041095891 and parameters: {'C': 0.002188774154661483, 'solver_penalty': ('saga', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:36,701] Trial 56 finished with value: 0.0 and parameters: {'C': 0.0005546739369558381, 'solver_penalty': ('saga', 'l1')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:36,825] Trial 57 finished with value: 0.7095890410958904 and parameters: {'C': 0.018413455043702728, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:36,948] Trial 58 finished with value: 0.0 and parameters: {'C': 0.0003622620874549911, 'solver_penalty': ('saga', 'l1')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:37,075] Trial 59 finished with value: 0.7109589041095891 and parameters: {'C': 0.007486389244271477, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:37,214] Trial 60 finished with value: 0.7109589041095891 and parameters: {'C': 0.0027237434781473505, 'solver_penalty': ('saga', 'elasticnet'), 'l1_ratio': 0.4556770111582642}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:37,343] Trial 61 finished with value: 0.7315068493150685 and parameters: {'C': 0.0002080273197784736, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:37,472] Trial 62 finished with value: 0.7602739726027398 and parameters: {'C': 0.00010007790847036462, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:37,602] Trial 63 finished with value: 0.7561643835616438 and parameters: {'C': 0.00010540091043783181, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:37,735] Trial 64 finished with value: 0.7123287671232876 and parameters: {'C': 0.0008390177256486524, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:37,861] Trial 65 finished with value: 0.7520547945205479 and parameters: {'C': 0.00011403524033752756, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:37,992] Trial 66 finished with value: 0.7191780821917808 and parameters: {'C': 0.000504209462989198, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:38,120] Trial 67 finished with value: 0.7123287671232876 and parameters: {'C': 0.0016606424877799754, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:38,280] Trial 68 finished with value: 0.7397260273972602 and parameters: {'C': 0.00011679676083284586, 'solver_penalty': ('saga', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:38,409] Trial 69 finished with value: 0.7232876712328767 and parameters: {'C': 0.00026572542339834906, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:38,539] Trial 70 finished with value: 0.7602739726027398 and parameters: {'C': 0.00010208650562488105, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:38,670] Trial 71 finished with value: 0.7191780821917808 and parameters: {'C': 0.00040715863410429893, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:38,803] Trial 72 finished with value: 0.7534246575342466 and parameters: {'C': 0.00010967847636103173, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:38,931] Trial 73 finished with value: 0.7534246575342466 and parameters: {'C': 0.00011003358111124966, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:39,063] Trial 74 finished with value: 0.7123287671232876 and parameters: {'C': 0.0007589543443209865, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:39,196] Trial 75 finished with value: 0.7328767123287672 and parameters: {'C': 0.0001974297268589192, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:39,325] Trial 76 finished with value: 0.7191780821917808 and parameters: {'C': 0.00037609255859338803, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:39,452] Trial 77 finished with value: 0.7602739726027398 and parameters: {'C': 0.00010144379852084821, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:39,582] Trial 78 finished with value: 0.7356164383561644 and parameters: {'C': 0.00018092466347034242, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:39,708] Trial 79 finished with value: 0.7123287671232876 and parameters: {'C': 0.0016017905046634847, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:39,830] Trial 80 finished with value: 0.7109589041095891 and parameters: {'C': 0.0006250741901553178, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:39,960] Trial 81 finished with value: 0.7561643835616438 and parameters: {'C': 0.00010714800523491437, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:40,090] Trial 82 finished with value: 0.7205479452054795 and parameters: {'C': 0.0003167689602014841, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:40,220] Trial 83 finished with value: 0.736986301369863 and parameters: {'C': 0.00016430312314145542, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:40,352] Trial 84 finished with value: 0.7123287671232876 and parameters: {'C': 0.0009399010635942511, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:40,476] Trial 85 finished with value: 0.0 and parameters: {'C': 0.00010342268355864702, 'solver_penalty': ('saga', 'elasticnet'), 'l1_ratio': 0.4555382047552521}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:40,605] Trial 86 finished with value: 0.7232876712328767 and parameters: {'C': 0.000258301043428223, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:40,737] Trial 87 finished with value: 0.7191780821917808 and parameters: {'C': 0.0004738980275340035, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:40,866] Trial 88 finished with value: 0.7232876712328767 and parameters: {'C': 0.000259918176600576, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:40,993] Trial 89 finished with value: 0.0 and parameters: {'C': 0.00015680789225680215, 'solver_penalty': ('saga', 'l1')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:41,126] Trial 90 finished with value: 0.7123287671232876 and parameters: {'C': 0.044410524639196064, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:41,256] Trial 91 finished with value: 0.7602739726027398 and parameters: {'C': 0.00010199629305695127, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:41,384] Trial 92 finished with value: 0.7356164383561644 and parameters: {'C': 0.00017207436473270813, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:41,515] Trial 93 finished with value: 0.7205479452054795 and parameters: {'C': 0.0003464622998222216, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:41,643] Trial 94 finished with value: 0.7602739726027398 and parameters: {'C': 0.00010163516277676701, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:41,772] Trial 95 finished with value: 0.7123287671232876 and parameters: {'C': 0.0006060382405807728, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:41,926] Trial 96 finished with value: 0.0 and parameters: {'C': 0.00019477991592964757, 'solver_penalty': ('saga', 'l1')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:42,058] Trial 97 finished with value: 0.0 and parameters: {'C': 0.00016548878263229316, 'solver_penalty': ('liblinear', 'l1')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:42,196] Trial 98 finished with value: 0.7109589041095891 and parameters: {'C': 0.010746018983921921, 'solver_penalty': ('lbfgs', 'l2')}. Best is trial 9 with value: 1.0.
[I 2024-12-07 10:30:42,321] Trial 99 finished with value: 0.7095890410958904 and parameters: {'C': 0.0011422745102939427, 'solver_penalty': ('liblinear', 'l2')}. Best is trial 9 with value: 1.0.
Best hyperparameters: {'C': 0.0024687139974742802, 'solver_penalty': ('saga', 'l1')}
Best validation recall: 1.0
import pandas as pd
from sklearn.compose import ColumnTransformer
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.preprocessing import StandardScaler, MaxAbsScaler
from sklearn.linear_model import LogisticRegression
from sklearn.pipeline import Pipeline
from sklearn.metrics import confusion_matrix, classification_report, ConfusionMatrixDisplay
import matplotlib.pyplot as plt

# Load the train/val/test splits from CSV.
train_df = pd.read_csv('./data/train_df.csv')
val_df = pd.read_csv('./data/val_df.csv')
test_df = pd.read_csv('./data/test_df.csv')

# Features and labels for each split (text + VADER compound score as features).
X_train = train_df[['text', 'sia_sentiment']]
y_train = train_df['target']
X_test = test_df[['text', 'sia_sentiment']]
y_test = test_df['target']

# Best hyperparameters found by the Optuna study run earlier in the notebook.
best_params = study.best_params
C = best_params['C']
solver, penalty = best_params['solver_penalty']
l1_ratio = best_params.get('l1_ratio')  # only present for elasticnet trials

# Preprocessor: TF-IDF on the raw text + scaling of the sia_sentiment column.
preprocessor = ColumnTransformer(
    transformers=[
        ('tfidf', TfidfVectorizer(min_df=2, max_df=0.5, ngram_range=(1, 2)), 'text'),
        ('scaler', Pipeline([('standard', StandardScaler()), ('maxabs', MaxAbsScaler())]), ['sia_sentiment'])
    ]
)

# Only forward l1_ratio when the penalty actually uses it: scikit-learn warns
# (and newer versions raise) when l1_ratio is set with a non-elasticnet penalty.
lr_kwargs = {'l1_ratio': l1_ratio} if penalty == 'elasticnet' else {}
classifier = LogisticRegression(
    C=C,
    solver=solver,
    penalty=penalty,
    max_iter=200,  # same iteration budget as the rest of the notebook
    random_state=42,
    **lr_kwargs,
)

pipeline = Pipeline([
    ('preprocessor', preprocessor),
    ('classifier', classifier)
])

# Fit on the training split, then evaluate on the held-out test split.
pipeline.fit(X_train, y_train)
y_test_pred = pipeline.predict(X_test)
print("Test Set Performance:\n", classification_report(y_test, y_test_pred))

# Confusion matrix on the test split.
conf_matrix = confusion_matrix(y_test, y_test_pred, labels=pipeline.classes_)
disp = ConfusionMatrixDisplay(confusion_matrix=conf_matrix, display_labels=pipeline.classes_)
fig, ax = plt.subplots(figsize=(8, 8))  # larger figure for readability
disp.plot(cmap=plt.cm.Blues, ax=ax)
ax.grid(False)  # hide the inner grid
plt.title("Matrice de confusion - Jeu de test")
plt.show()
Test Set Performance:
precision recall f1-score support
0 0.73 0.75 0.74 807
1 0.74 0.72 0.73 810
accuracy 0.74 1617
macro avg 0.74 0.74 0.74 1617
weighted avg 0.74 0.74 0.74 1617
L'optimisation n'a pas amené d'amélioration par rapport à une régression logistique simple.
Pistes pour des améliorations futures:
L'optimisation s'arrête ici. L'exactitude du modèle est très honorable pour des moyens de calcul aussi modestes. Il pourrait être intéressant de travailler le feature engineering (utilisation d'autres colonnes comme la longueur des tweets, introduction de termes quadratiques, etc.).
Un algorithme plus élaboré serait peut-être plus susceptible d'être amélioré par ajustement des paramètres (RandomForest, GBC).
Sauvegarde du modèle dans MLFlow¶
import time
import pandas as pd
from sklearn.pipeline import Pipeline
from sklearn.compose import ColumnTransformer
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.preprocessing import StandardScaler, FunctionTransformer
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import classification_report, confusion_matrix, recall_score, accuracy_score
import mlflow
import mlflow.sklearn
from nltk.sentiment.vader import SentimentIntensityAnalyzer
from sklearn import set_config
from mlflow.models.signature import infer_signature
import matplotlib.pyplot as plt
import seaborn as sns
# Initialize NLTK's VADER sentiment analyzer (used as an extra numeric feature).
sia = SentimentIntensityAnalyzer()
# Helper injected into the sklearn pipeline to compute the sia_sentiment feature.
def compute_sia_sentiment(texts):
    """Return the VADER compound polarity of each text as a column vector (list of 1-element lists)."""
    return [[sia.polarity_scores(text)['compound']] for text in texts]
# Load the train/val/test splits from CSV.
train_df = pd.read_csv('./data/train_df.csv')
val_df = pd.read_csv('./data/val_df.csv')
test_df = pd.read_csv('./data/test_df.csv')
# Features (single 'text' column; sia_sentiment is now computed inside the
# pipeline by compute_sia_sentiment) and labels for each split.
X_train, y_train = train_df[['text']], train_df['target']
X_val, y_val = val_df[['text']], val_df['target']
X_test, y_test = test_df[['text']], test_df['target']
# Open an MLflow run. NOTE: the run is intentionally left open here — it is
# closed in a later cell with mlflow.end_run().
mlflow.start_run(run_name="pipeline_lr")
# Preprocessor: TF-IDF on the text + a scaled VADER compound score derived
# from the same 'text' column via FunctionTransformer.
preprocessor = ColumnTransformer(
    transformers=[
        ('tfidf', TfidfVectorizer(min_df=2, max_df=0.5, ngram_range=(1, 2)), 'text'),
        ('sia_sentiment', Pipeline([
            ('compute_sia', FunctionTransformer(compute_sia_sentiment, validate=False)),
            ('scaler', StandardScaler())
        ]), 'text')
    ]
)
# Full pipeline: preprocessing + logistic-regression classifier.
pipeline = Pipeline([
    ('preprocessor', preprocessor),
    ('classifier', LogisticRegression(max_iter=200, random_state=42))
])
# Measure wall-clock training time (logged as the "TT" metric below).
start_time = time.time()
pipeline.fit(X_train, y_train)
training_time = time.time() - start_time
# Predictions and evaluation on the validation split.
y_val_pred = pipeline.predict(X_val)
val_report = classification_report(y_val, y_val_pred, output_dict=True)
val_conf_matrix = confusion_matrix(y_val, y_val_pred)
# Predictions and evaluation on the test split.
y_test_pred = pipeline.predict(X_test)
test_accuracy = accuracy_score(y_test, y_test_pred)
# NOTE(review): recall is computed with class 1 as the positive label, but
# later cells map prediction 0 to "positif" — confirm which class is positive.
test_recall = recall_score(y_test, y_test_pred, pos_label=1) # class treated as positive
test_report = classification_report(y_test, y_test_pred, output_dict=True)
test_conf_matrix = confusion_matrix(y_test, y_test_pred)
# Log the fitted pipeline to MLflow with an inferred signature and an example input.
mlflow.sklearn.log_model(
    pipeline, "model",
    signature=infer_signature(X_train, y_train),
    input_example=X_train.head(5)
)
# Log the headline metrics for this run.
mlflow.log_metrics({
    "TT": training_time, # training time (seconds)
    "Accuracy": test_accuracy, # accuracy, for comparison with PyCaret runs
    "Recall": test_recall, # recall, for comparison with PyCaret runs
    "val_accuracy": val_report['accuracy'],
    "val_f1": val_report['weighted avg']['f1-score'],
    "test_f1": test_report['weighted avg']['f1-score']
})
# Log a confusion matrix as an image artifact on the active MLflow run.
def log_conf_matrix(cm, labels, name):
    """Render *cm* as an annotated heatmap and attach it to the current MLflow run."""
    figure, axis = plt.subplots(figsize=(6, 6))
    sns.heatmap(
        cm,
        annot=True,
        fmt="d",
        cmap="Blues",
        xticklabels=labels,
        yticklabels=labels,
        ax=axis,
    )
    axis.set_xlabel("Predicted labels")
    axis.set_ylabel("True labels")
    axis.set_title(f"Confusion Matrix - {name}")
    mlflow.log_figure(figure, f"{name}_confusion_matrix.png")
    plt.close(figure)
# Log the confusion matrices for the validation and test splits as MLflow artifacts.
log_conf_matrix(val_conf_matrix, labels=['negative', 'positive'], name="Validation")
log_conf_matrix(test_conf_matrix, labels=['negative', 'positive'], name="Test")
Downloading artifacts: 0%| | 0/7 [00:00<?, ?it/s]
Validation du serving payload¶
import mlflow
from mlflow.models import validate_serving_input
from mlflow.models.signature import infer_signature
import json
# The serving payload as a JSON string (dataframe_split format expected by
# the MLflow scoring server).
serving_payload = """{
  "dataframe_split": {
    "columns": [
      "text"
    ],
    "data": [
      [
        "more bodies found from air france flight. now its at 5 "
      ],
      [
        "@ChrisTFT yes it is insaaannnee over there! @ least that's what i'm told. have a safe flight"
      ],
      [
        "@ebassman Dang! Wish I was there! Hop a flight to chattanooga! "
      ],
      [
        "Off to the airport for Maria's album launch "
      ],
      [
        "@iamsmartalek Man, that is such a bummer to hear. I never even did the last software upgrade in anticipation of this one. "
      ]
    ]
  }
}"""
# Parse the payload to a JSON object.
serving_payload_json = json.loads(serving_payload)
# Build the model URI from the currently active run, if any.
active_run = mlflow.active_run()
if active_run:
    model_uri = f"runs:/{active_run.info.run_id}/model"
    # Validate the serving input against the logged model without creating a new run.
    # NOTE(review): the auto-generated MLflow snippet passes the raw JSON string;
    # here the parsed dict is passed — confirm validate_serving_input accepts both
    # for this MLflow version.
    validate_serving_input(model_uri, serving_payload_json)
    # Re-log the model on the active run.
    # NOTE(review): the pipeline was already logged in the previous cell; this
    # second log_model writes a duplicate "model" artifact — confirm intended.
    mlflow.sklearn.log_model(pipeline, "model", signature=infer_signature(X_train, y_train), input_example=X_train.head(5))
    # Additional metrics or info could be logged on this active run here.
    # Example: mlflow.log_metrics({"example_metric": value})
Downloading artifacts: 0%| | 0/7 [00:00<?, ?it/s]
Downloading artifacts: 0%| | 0/7 [00:00<?, ?it/s]
mlflow.end_run()
2024/12/07 11:52:34 INFO mlflow.tracking._tracking_service.client: 🏃 View run pipeline_lr at: http://localhost:5000/#/experiments/374735653194037029/runs/bd5029bd0143409fa7a3fa5a1b392e5f. 2024/12/07 11:52:34 INFO mlflow.tracking._tracking_service.client: 🧪 View experiment at: http://localhost:5000/#/experiments/374735653194037029.
model_uri
'runs:/bd5029bd0143409fa7a3fa5a1b392e5f/model'
import mlflow
import pandas as pd

# Draw one random tweet from the "text" column of test_df.
data = test_df[['text']].sample(1).reset_index(drop=True)
print("Phrase : ", data['text'].values[0])

# Load the registered model (model_uri was defined in a previous cell).
model = mlflow.pyfunc.load_model(model_uri)

# Predict from a one-row DataFrame with the 'text' column, then map the class
# to a label (0 = positive, anything else = negative).
prediction = model.predict(data)
label = "Sentiment : positif" if prediction[0] == 0 else "Sentiment : négatif"
print(label)
Phrase : Boarding the Shanghai flight. Sad to leave Beijing
Downloading artifacts: 0%| | 0/7 [00:00<?, ?it/s]
Sentiment : négatif
import mlflow
import pandas as pd

# A single hand-written example sentence.
sentence = "My baggage was damaged and the airline took no responsibility for it."
data = pd.DataFrame({'text': [sentence]})
print("Phrase : ", sentence)

# `model` was already loaded in a previous cell; predict and report the label
# (0 = positive, anything else = negative).
prediction = model.predict(data)
print("Sentiment : positif" if prediction[0] == 0 else "Sentiment : négatif")
Phrase : My baggage was damaged and the airline took no responsibility for it. Sentiment : négatif
Enregistrement du modèle loggé¶
# Register the logged model in the local MLflow Model Registry.
import mlflow

# Name under which the model is stored in the registry.
model_name = "Pipeline_sia_lr"

# Create a new registry version from the run's model artifact (model_uri from a previous cell).
model_version = mlflow.register_model(model_uri=model_uri, name=model_name)

# Tag that version with the "champion_eco" alias so it can be resolved by alias.
registry_client = mlflow.tracking.MlflowClient()
registry_client.set_registered_model_alias(
    name=model_name,
    alias="champion_eco",
    version=model_version.version,
)
print(f"Modèle enregistré avec succès sous le nom '{model_name}' avec l'alias 'champion_eco'.")
Successfully registered model 'Pipeline_sia_lr'. 2024/12/07 12:00:17 INFO mlflow.store.model_registry.abstract_store: Waiting up to 300 seconds for model version to finish creation. Model name: Pipeline_sia_lr, version 1
Modèle enregistré avec succès sous le nom 'Pipeline_sia_lr' avec l'alias 'champion_eco'.
Created version '1' of model 'Pipeline_sia_lr'.
Sauvegarde du modèle sur Azure Blob¶
import mlflow
import os
import shutil
# Model URI in the Model Registry (name + version).
model_name = "Pipeline_sia_lr"
model_version = "1"
model_uri = f"models:/{model_name}/{model_version}"
# Local scratch directory where the model artifacts are materialized.
model_dir = "./model_temp" # temporary directory
# Start from a clean slate if a previous download is still present.
if os.path.exists(model_dir):
    shutil.rmtree(model_dir)
# Download the model artifacts from the MLflow tracking server.
mlflow.artifacts.download_artifacts(artifact_uri=model_uri, dst_path=model_dir)
print(f"Modèle téléchargé en local sous {model_dir}")
Downloading artifacts: 0%| | 0/7 [00:00<?, ?it/s]
Modèle téléchargé en local sous ./model_temp
# ! pip install azure-storage-blob
from azure.storage.blob import BlobServiceClient
import os
import shutil

# The Azure Storage connection string comes from the environment; fail fast
# with a clear message instead of a cryptic SDK error when it is missing.
storage_connection_string = os.environ.get("AZURE_STORAGE_CONNECTION_STRING")
if not storage_connection_string:
    raise EnvironmentError("AZURE_STORAGE_CONNECTION_STRING is not set")
container_name = "oc-p7-ecomodele"

# Blob service client for the target storage account.
blob_service_client = BlobServiceClient.from_connection_string(storage_connection_string)

# Directory holding the downloaded model (model_dir from the previous cell).
local_model_path = model_dir

# Upload every file of the model directory, preserving its relative layout.
for root, dirs, files in os.walk(local_model_path):
    for file_name in files:
        # Local path of the file.
        file_path = os.path.join(root, file_name)
        # Blob names must use forward slashes; os.path.relpath yields
        # backslashes on Windows, so normalize the separator explicitly.
        blob_name = os.path.relpath(file_path, local_model_path).replace(os.sep, "/")
        blob_client = blob_service_client.get_blob_client(container=container_name, blob=blob_name)
        # Upload the file, overwriting any existing blob with the same name.
        with open(file_path, "rb") as data:
            blob_client.upload_blob(data, overwrite=True)

print(f"Modèle uploadé dans le container Azure Blob '{container_name}'.")

# Remove the local scratch directory once the upload is done.
try:
    # Only remove it if it actually exists and is a directory.
    if os.path.exists(local_model_path) and os.path.isdir(local_model_path):
        # Recursively delete the directory and its contents.
        shutil.rmtree(local_model_path)
        print(f"Dossier local '{local_model_path}' supprimé.")
    else:
        print(f"Le dossier local '{local_model_path}' n'existe pas.")
except Exception as e:
    print(f"Erreur lors de la suppression du dossier local : {e}")
Modèle uploadé dans le container Azure Blob 'oc-p7-ecomodele'. Dossier local './model_temp' supprimé.
Test de fonctionnement local du meilleur modèle classique¶
# !pip install deep-translator
# Test initial: run avec le modèle disponible localement
%run deploiement_local_flask_eco.py #
# run avec le modèle téléchargé depuis le blobstorage (le script télécharge localement le modèle)
%run deploiement_local_flask_eco_modeleAzure.py
Téléchargé : MLmodel Téléchargé : conda.yaml Téléchargé : input_example.json Téléchargé : model.pkl Téléchargé : python_env.yaml Téléchargé : registered_model_meta Téléchargé : requirements.txt Téléchargé : serving_input_example.json Modèle chargé avec succès. * Serving Flask app 'deploiement_local_flask_eco_modeleAzure' * Debug mode: off
WARNING: This is a development server. Do not use it in a production deployment. Use a production WSGI server instead. * Running on http://127.0.0.1:5001 Press CTRL+C to quit
# run de l'app complète (avec les modules de test) qui va être déployée dans le pipeline vers la production avec le modèle réel monté
# import os # Assigner la variable d'environnement pour le mode test
# os.environ['FLASK_ENV'] = "production"
# %run "C:\\Users\\cecil\\Documents\\p7_app_test\\app.py"
Interrogation de l'API¶
import requests

url = "https://tweetseco-aqb3breuc4f6bsaj.francecentral-01.azurewebsites.net"

# Health check on the API root; a timeout keeps the call from hanging forever.
response = requests.get(url, timeout=10)
if response.status_code == 200:
    print("Réponse de l'API :")
    # Print the response body, not the Response object's repr
    # (the original printed "<Response [200]>").
    print(response.text)
else:
    print(f"Erreur {response.status_code}: {response.text}")
Réponse de l'API : <Response [200]>
import requests

# URL of the /predict endpoint.
url = "https://tweetseco-aqb3breuc4f6bsaj.francecentral-01.azurewebsites.net/predict"

# Texts to analyze.
texts = [
    "I love programming!",
    "Today is such a bad day.",
    "The weather is amazing.",
    "I'm feeling really sad right now.",
    "This is the best thing ever!"
]

# Collected results, one dict per text (either prediction or error).
predictions = []

for text in texts:
    # JSON payload expected by the endpoint.
    payload = {"text": text}
    try:
        # POST the text; a timeout prevents the loop from hanging on a
        # non-responsive server (requests has no default timeout).
        response = requests.post(url, json=payload, timeout=10)
        if response.status_code == 200:
            result = response.json()
            predictions.append({
                "text": text,
                "prediction": result.get("prediction"),
                "translated_text": result.get("translated_text", "N/A")  # if the API returns the translation
            })
        else:
            predictions.append({
                "text": text,
                "error": f"Erreur {response.status_code}: {response.text}"
            })
    except Exception as e:
        predictions.append({
            "text": text,
            "error": f"Exception: {str(e)}"
        })

# Display the results.
for pred in predictions:
    print(f"Texte: {pred['text']}")
    if "prediction" in pred:
        print(f" - Prédiction : {pred['prediction']}")
        print(f" - Texte traduit : {pred['translated_text']}")
    else:
        print(f" - Erreur : {pred['error']}")
Texte: I love programming! - Prédiction : [0] - Texte traduit : I love programming! Texte: Today is such a bad day. - Prédiction : [1] - Texte traduit : Today is such a bad day. Texte: The weather is amazing. - Prédiction : [0] - Texte traduit : The weather is amazing. Texte: I'm feeling really sad right now. - Prédiction : [1] - Texte traduit : I'm feeling really sad right now. Texte: This is the best thing ever! - Prédiction : [0] - Texte traduit : This is the best thing ever!
Amélioration continue¶
Récupération des insights depuis Azure : si on observe une accumulation d'erreurs sur une durée déterminée, on ré-entraîne le modèle.
import os
import requests
import pandas as pd
from datetime import datetime, timedelta, timezone

# Application Insights identifiers.
APP_ID = "be803281-fd4b-46ce-98cd-becf27978e7f"  # Application Insights application ID
# SECURITY: the literal key below was committed to this notebook — it should be
# rotated. Prefer the environment variable; the literal is kept only as a
# backward-compatible fallback.
API_KEY = os.environ.get("APPINSIGHTS_API_KEY", "yn6jwh0q3psjm43px8y6xgots6cyrjusftkd41f8")

# Query endpoint of the Application Insights REST API.
BASE_URL = f"https://api.applicationinsights.io/v1/apps/{APP_ID}/query"

# KQL query: collect the last 1000 "IncorrectPrediction" events of the past
# 30 days and map their sentiment label to 1 (Positif) / 0 (Négatif) / -1.
KQL_QUERY = """
union isfuzzy=true
availabilityResults,
requests,
exceptions,
pageViews,
traces,
customEvents,
dependencies
| where timestamp > ago(30d)
| where * has "IncorrectPrediction"
| project
timestamp,
name,
translated_text = tostring(customDimensions.translated_text),
sentiment = tostring(customDimensions.sentiment),
correct_sentiment = case(
tostring(customDimensions.sentiment) == "Positif",
1,
tostring(customDimensions.sentiment) == "Négatif",
0,
-1
) // -1 pour gérer les valeurs non définies ou autres cas
| order by timestamp desc
| take 1000
"""

# Time window for the API call (last 7 days; the KQL itself also filters on 30d).
# datetime.now(timezone.utc) replaces the deprecated datetime.utcnow(); the
# formatted output is identical since the format appends a literal 'Z'.
start_time = (datetime.now(timezone.utc) - timedelta(days=7)).strftime('%Y-%m-%dT%H:%M:%SZ')
end_time = datetime.now(timezone.utc).strftime('%Y-%m-%dT%H:%M:%SZ')

# Request parameters.
params = {
    "query": KQL_QUERY,
    "timespan": f"{start_time}/{end_time}"
}

# Request headers (API-key authentication).
headers = {
    "x-api-key": API_KEY
}

# Send the request; a timeout prevents an indefinite hang on a slow endpoint.
response = requests.get(BASE_URL, headers=headers, params=params, timeout=30)

# Check the response and extract the rows into a DataFrame.
if response.status_code == 200:
    data = response.json()
    tables = data.get("tables", [])
    # Guard against an empty "tables" list before indexing tables[0].
    if tables:
        results = pd.DataFrame(tables[0].get("rows", []),
                               columns=[col["name"] for col in tables[0]["columns"]])
        display(results)
    else:
        print("Réponse sans table de résultats.")
else:
    print(f"Erreur {response.status_code}: {response.text}")
| timestamp | name | translated_text | sentiment | correct_sentiment | |
|---|---|---|---|---|---|
| 0 | 2024-11-27T20:11:35.748936Z | IncorrectPrediction | I loved flying with luxair | Positif | 1 |
| 1 | 2024-11-27T20:11:35.748936Z | IncorrectPrediction | I loved flying with luxair | Positif | 1 |
| 2 | 2024-11-27T20:11:35.748936Z | IncorrectPrediction | I loved flying with luxair | Positif | 1 |
| 3 | 2024-11-27T20:11:35.748936Z | IncorrectPrediction | I loved flying with luxair | Positif | 1 |
| 4 | 2024-11-27T20:11:35.748936Z | IncorrectPrediction | I loved flying with luxair | Positif | 1 |
| ... | ... | ... | ... | ... | ... |
| 82 | 2024-11-25T09:14:03.472753Z | IncorrectPrediction | Can’t believe the food on this flight! <i>Deli... | Négatif | 0 |
| 83 | 2024-11-25T09:13:59.168979Z | IncorrectPrediction | Flight was delayed for hours and there was zer... | Négatif | 0 |
| 84 | 2024-11-25T09:13:55.334365Z | IncorrectPrediction | I loved flying with luxair | Positif | 1 |
| 85 | 2024-11-25T09:13:51.055121Z | IncorrectPrediction | I loved flying with luxair | Positif | 1 |
| 86 | 2024-11-25T09:11:02.796149Z | IncorrectPrediction | My flight with @AirlineName was delayed by hou... | Négatif | 0 |
87 rows × 5 columns
import pandas as pd
import matplotlib.pyplot as plt

# Make sure 'timestamp' is a datetime so the series can be resampled.
results['timestamp'] = pd.to_datetime(results['timestamp'])

# Keep only the incorrect predictions (correct_sentiment == 0).
# .copy() is required: the in-place set_index below would otherwise operate on
# a slice of `results` and trigger pandas' SettingWithCopyWarning.
incorrect_predictions = results[results['correct_sentiment'] == 0].copy()

# Cumulative error count in 5-minute buckets.
incorrect_predictions.set_index('timestamp', inplace=True)
incorrect_predictions_resampled = incorrect_predictions.resample('5min').size().cumsum()

# Plot the cumulative error curve.
plt.figure(figsize=(10, 6))
plt.plot(incorrect_predictions_resampled.index, incorrect_predictions_resampled.values, marker='o')

# Horizontal threshold at 50 errors: the retraining trigger.
plt.axhline(y=50, color='red', linestyle='--', label='Limite de validité du modèle')

# Title and axis labels.
plt.title('Nombre cumulé d\'erreurs de prédiction par tranche de 5 minutes')
plt.xlabel('Temps')
plt.ylabel('Nombre d\'erreurs cumulées')

plt.legend()

# Final formatting.
plt.xticks(rotation=45)
plt.grid(True)
plt.tight_layout()
plt.show()
import os
from datetime import datetime

# Report when this notebook file was last written to disk.
notebook_path = "P7_approche_classique.ipynb"
mtime = os.path.getmtime(notebook_path)
last_saved = datetime.fromtimestamp(mtime)
print(f"La dernière sauvegarde du notebook est : {last_saved}")
La dernière sauvegarde du notebook est : 2024-12-07 21:04:20.874727